How to train Watson NLC with data from SQL Table (DashDB) and use twitter to classify and respond

In this flow, Watson NLC is trained with data that comes from a SQL table (DashDB) with two columns — a text and a classification — and a classification is requested through Twitter, which sends the response back to the author of the tweet.

[{"id":"b706d874.fe0868","type":"watson-natural-language-classifier","z":"d69c9d5.0c60b6","name":"","mode":"create","language":"pt","classifier":"","x":605.6666259765625,"y":361.9513854980469,"wires":[["5d8a8ed0.281de"]]},{"id":"6c8c0980.165fc8","type":"function","z":"d69c9d5.0c60b6","name":"Array de nomes","func":"var aux = msg.payload.length;\nvar origens = [];\n\nfor(var x = 0;x < aux; x++)\n{\n    origens[x] = { text: msg.payload[x].NOME, classes:[msg.payload[x].ORIGEM]};\n}\n\nmsg.payload = origens;\nreturn msg;","outputs":1,"noerr":0,"x":390.6666259765625,"y":362.9513854980469,"wires":[["b706d874.fe0868","dfbb5e05.7f386"]]},{"id":"5d8a8ed0.281de","type":"debug","z":"d69c9d5.0c60b6","name":"","active":true,"console":"true","complete":"payload","x":850.6666107177734,"y":246.95138549804688,"wires":[]},{"id":"3a0b53d1.ad0aec","type":"watson-natural-language-classifier","z":"d69c9d5.0c60b6","name":"","mode":"classify","language":"en","classifier":"c115edx72-nlc-2793","x":414.6666259765625,"y":146.95138549804688,"wires":[["bf06d4f2.5ac378","5d8a8ed0.281de","c0b1281.75310d8"]]},{"id":"a8134bb7.c2b6c8","type":"comment","z":"d69c9d5.0c60b6","name":"Training process","info":"{ \"classifier_id\": \"3a84cfx63-nlc-1619\", \n\"name\": null, \"language\": \"pt\", \n\"created\": \"2016-04-29T16:34:14.064Z\", \n\"url\": \n\"https://gateway.watsonplatform.net/natural-language-classifier/api/v1/classifiers/3a84cfx63-nlc-1619\", \n\"status\": \"Training\", \"status_description\": \n\"The classifier instance is in its training phase, not yet ready to accept classify requests\" \n}","x":91.6666259765625,"y":383.95140838623047,"wires":[]},{"id":"46360735.cf3108","type":"change","z":"d69c9d5.0c60b6","name":"","rules":[{"t":"change","p":"payload","pt":"msg","from":"#watsonnomeorigem","fromt":"str","to":"","tot":"str"},{"t":"change","p":"tweet.text","pt":"msg","from":"#watsonnomeorigem","fromt":"str","to":"","tot":"str"}],"action":"","property":"","from":"","to":"","reg":false,"x":418.6666259765625,"y":76.95138549804688,"wires":[["3a0b53d1.ad0aec"]]},{"id":"8044cfc5.54bc3","type":"inject","z":"d69c9d5.0c60b6","name":"","topic":"","payload":"Rachel","payloadType":"str","repeat":"","crontab":"","once":false,"x":145.6666259765625,"y":146.95138549804688,"wires":[["3a0b53d1.ad0aec"]]},{"id":"bf06d4f2.5ac378","type":"function","z":"d69c9d5.0c60b6","name":"Resposta","func":"var text;\nif(msg.tweet)\n{\n    text = \"@\" + msg.tweet.user.screen_name + \", O nome\" + msg.tweet.text + \" parece-me ser de origem \";\n}\nelse\n{\n    text = \"Meu amigo, O nome\" + msg.payload + \" parece-me ser de origem \";\n}\ntext += msg.payload.classes[0].class_name + \" (\" + \n            Math.round( msg.payload.classes[0].confidence * 100 ) + \"%)\" + String.fromCharCode(13);\nmsg.payload = text;\nreturn msg;","outputs":1,"noerr":0,"x":626.6666259765625,"y":145.95138549804688,"wires":[["5d8a8ed0.281de","186589bf.3bef96"]]},{"id":"424219a4.e6e3a8","type":"http in","z":"d69c9d5.0c60b6","name":"","url":"/nomeorigem","method":"get","swaggerDoc":"","x":114.95135498046875,"y":257.74999237060547,"wires":[["8fe184fc.486178"]]},{"id":"8fe184fc.486178","type":"change","z":"d69c9d5.0c60b6","name":"","rules":[{"t":"set","p":"payload","pt":"msg","to":"payload.nome","tot":"msg"}],"action":"","property":"","from":"","to":"","reg":false,"x":360.9513244628906,"y":258.7569580078125,"wires":[["3a0b53d1.ad0aec"]]},{"id":"c0b1281.75310d8","type":"function","z":"d69c9d5.0c60b6","name":"","func":"msg.payload=msg.payload.classes[1].class_name + \" (\" + Math.round( \nmsg.payload.classes[1].confidence * 100 ) + \"%)\";\nreturn msg;","outputs":1,"noerr":0,"x":552.9409332275391,"y":258.7326354980469,"wires":[["9f7e91c3.f4fce"]]},{"id":"9f7e91c3.f4fce","type":"http response","z":"d69c9d5.0c60b6","name":"","x":680.9443817138672,"y":259.1493225097656,"wires":[]},{"id":"5dd17c62.4ed854","type":"watson-natural-language-classifier","z":"d69c9d5.0c60b6","name":"","mode":"list","language":"pt","classifier":"","x":563.7916450500488,"y":495.20142364501953,"wires":[["5d8a8ed0.281de"]]},{"id":"1a4f7ac1.4c4795","type":"inject","z":"d69c9d5.0c60b6","name":"Lista Classifiers","topic":"","payload":"","payloadType":"date","repeat":"","crontab":"","once":false,"x":297.5416259765625,"y":496.20140838623047,"wires":[["5dd17c62.4ed854"]]},{"id":"54824d4b.a08164","type":"twitter in","z":"d69c9d5.0c60b6","twitter":"","tags":"#watsonnomeorigem","user":"false","name":"","topic":"tweets","x":153.93746948242188,"y":77,"wires":[["46360735.cf3108"]]},{"id":"186589bf.3bef96","type":"twitter out","z":"d69c9d5.0c60b6","twitter":"","name":"Tweet","x":779.9443817138672,"y":88.40972137451172,"wires":[]},{"id":"79cbd948.239de8","type":"dashDB in","z":"d69c9d5.0c60b6","service":"dashDB-0r","query":"SELECT * FROM NOMES_E_ORIGENS","params":"","name":"","x":274.8333435058594,"y":432.33341217041016,"wires":[["dfbb5e05.7f386","6c8c0980.165fc8"]]},{"id":"5f7b1ec0.ec312","type":"inject","z":"d69c9d5.0c60b6","name":"","topic":"","payload":"","payloadType":"date","repeat":"","crontab":"","once":false,"x":95.83332824707031,"y":431.33336639404297,"wires":[["79cbd948.239de8"]]},{"id":"dfbb5e05.7f386","type":"debug","z":"d69c9d5.0c60b6","name":"","active":true,"console":"false","complete":"false","x":854.8332824707031,"y":427.66670989990234,"wires":[]}]
sergiogama

Flow Info

created 1 year, 4 months ago

Node Types

Core
  • change (x2)
  • comment (x1)
  • debug (x2)
  • function (x3)
  • http in (x1)
  • http response (x1)
  • inject (x3)
  • twitter in (x1)
  • twitter out (x1)
Other

Tags

  • NLC
  • Twitter
  • DashDB
  • HTTP
Copy this flow JSON to your clipboard and then import into Node-RED using the Import From > Clipboard (Ctrl-I) menu option