Skip to content

Commit

Permalink
merged master && MarketJob Refactor && Test Refactor WIP | fix tests …
Browse files Browse the repository at this point in the history
…asap
  • Loading branch information
tiero committed Nov 24, 2017
1 parent e3f467b commit b344a3f
Show file tree
Hide file tree
Showing 27 changed files with 607 additions and 35 deletions.
91 changes: 91 additions & 0 deletions agent/adapters/aigents/__init__.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,91 @@
#
# agent/adapters/aigents/__init__.py - adapter integrating different sub-services of Aigents web service,
# such as RSS feeding, social graph discovery, summarizing pattern extraction and entity attribution
#
# Copyright (c) 2017 SingularityNET
#
# Distributed under the MIT software license, see LICENSE file.
#

import requests
import logging
from typing import List

from sn_agent.job.job_descriptor import JobDescriptor
from sn_agent.service_adapter import ServiceAdapterABC
from sn_agent.ontology import Service
from sn_agent.service_adapter import ServiceAdapterABC, ServiceManager

logger = logging.getLogger(__name__)


class AigentsAdapter(ServiceAdapterABC):
    """Service adapter that proxies job items to the Aigents web service (RSS feeds)."""

    type_name = "AigentsAdapter"

    def __init__(self, app, service: Service, required_services: List[Service]) -> None:
        super().__init__(app, service, required_services)

        # Initialize member variables here.
        self.response_template = None

    def post_load_initialize(self, service_manager: ServiceManager):
        super().post_load_initialize(service_manager)

        # Do any agent initialization here.
        # TODO: perform Aigents-specific setup once requirements are known.

    def get_attached_job_data(self, job_item: dict) -> dict:
        """Validate *job_item* and return its attached input data.

        Raises:
            RuntimeError: if 'input_type' is not 'attached' or 'input_data' is None.
        """
        # Make sure the input type is one we can handle...
        input_type = job_item['input_type']
        if input_type != 'attached':
            logger.error("BAD input dict %s", str(job_item))
            raise RuntimeError("Aigents - job item 'input_type' must be 'attached'.")

        # Pull the input data from the job item.
        input_data = job_item['input_data']
        if input_data is None:
            # Fixed typo: message previously read "Agients".
            raise RuntimeError("Aigents - job item 'input_data' must be defined.")

        return input_data

    def perform(self, job: JobDescriptor):
        """Run every item in *job* against the Aigents RSS endpoint.

        Returns:
            A list with one result dict per job item, each carrying
            'adapter_type', 'service_type' and 'response_data' keys.

        Raises:
            RuntimeError: if a job item is malformed or the HTTP call fails.
        """
        logger.debug("Performing Aigents job.")

        # Process the items in the job. The job may include many.
        results = []
        for job_item in job:

            # Get the input data for this job item.
            # TODO: actual parameters handling
            job_data = self.get_attached_job_data(job_item)
            logger.info(job_data)
            rss_area = job_data['rss_area']

            # TODO: make the endpoint configurable instead of hard-coded.
            r = requests.get("https://aigents.com/al/?rss%20" + rss_area)
            logger.info(r)

            # requests.get() never returns None, so the old `if r is None`
            # check was dead code; verify the HTTP status instead.
            if r.status_code != 200:
                raise RuntimeError("Aigents - bad response status %d" % r.status_code)

            output = r.text

            # Add the job results to our combined results array for all job items.
            single_job_result = {
                'adapter_type' : 'aigents',
                'service_type' : 'rss',
                'response_data': output
            }
            results.append(single_job_result)

        # Return the list of results that come from appending the results for the
        # individual job items in the job.
        return results

18 changes: 18 additions & 0 deletions agent/adapters/aigents/aigents_test.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,18 @@
import requests
import logging
from typing import List

from sn_agent.job.job_descriptor import JobDescriptor
from sn_agent.service_adapter import ServiceAdapterABC
from sn_agent.ontology import Service
from sn_agent.service_adapter import ServiceAdapterABC, ServiceManager

logger = logging.getLogger(__name__)

def main():
    """Smoke test: fetch the 'ai' RSS area from the Aigents service and log the reply."""
    r = requests.get("https://aigents.com/al/?rss%20ai")
    logger.info(r.status_code)
    logger.info(r.headers)
    data = r.text
    logger.info(data)
    logger.info("Done")


if __name__ == "__main__":
    # Configure a handler so the INFO records above are actually emitted,
    # and avoid hitting the network as a side effect of a mere import.
    logging.basicConfig(level=logging.INFO)
    main()

14 changes: 13 additions & 1 deletion agent/alice_config.yml
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,18 @@ services:
agents:
- 804be272-1cd8-4e11-8e5d-2eed03d47405

- service: deadbeef-aaaa-bbbb-cccc-000000000000
module: adapters.aigents.AigentsAdapter

- service: deadbeef-aaaa-bbbb-cccc-000000000101
module: examples.simple_adapter.SimpleAdapter

- service: deadbeef-aaaa-bbbb-cccc-111111111101
module: examples.tensorflow_mnist.TensorflowMNIST

- service: deadbeef-aaaa-bbbb-cccc-000000000202
module: examples.relex.RelexAdapter




1 change: 1 addition & 0 deletions agent/examples/__init__.py
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
# Examples module initialization goes here...
129 changes: 129 additions & 0 deletions agent/examples/relex/__init__.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,129 @@
#
# relex/__init__.py - an AI adapter that integrates the relex natural language parser...
#
# Copyright (c) 2017 SingularityNET
#
# Distributed under the MIT software license, see LICENSE file.
#

import logging
from typing import List
import socket
import json
import select
import asyncio

from sn_agent.job.job_descriptor import JobDescriptor
from sn_agent.service_adapter import ServiceAdapterABC
from sn_agent.ontology import Service
from sn_agent.service_adapter import ServiceAdapterABC, ServiceManager

logger = logging.getLogger(__name__)


class RelexAdapter(ServiceAdapterABC):
    """Service adapter that parses sentences with a relex server over a TCP socket."""

    type_name = "RelexAdapter"

    def __init__(self, app, service: Service, required_services: List[Service]) -> None:
        super().__init__(app, service, required_services)

        # Initialize member variables here.
        self.response_template = None

    def post_load_initialize(self, service_manager: ServiceManager):
        super().post_load_initialize(service_manager)

    def get_attached_job_data(self, job_item: dict) -> dict:
        """Validate *job_item* and return its attached input data.

        Raises:
            RuntimeError: if 'input_type' is not 'attached' or 'input_data' is None.
        """
        # Make sure the input type is one we can handle...
        input_type = job_item['input_type']
        if input_type != 'attached':
            logger.error("BAD input dict %s", str(job_item))
            # Fixed copy-paste: these messages previously said "AgentSimple".
            raise RuntimeError("RelexAdapter - job item 'input_type' must be 'attached'.")

        # Pull the input data from the job item.
        input_data = job_item['input_data']
        if input_data is None:
            raise RuntimeError("RelexAdapter - job item 'input_data' must be defined.")

        return input_data

    def relex_parse_sentence(self, sentence: str) -> str:
        """Send *sentence* to the relex server and return its reply as text.

        Returns:
            The decoded reply, or the string "NOT RECEIVED" on socket timeout.
            (Return annotation fixed: this function returns str, not dict.)
        """
        # Open a TCP socket with a finite timeout so a dead server cannot hang us.
        relex_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        time_out_seconds = 10.0
        relex_socket.settimeout(time_out_seconds)
        received_message = "NOT RECEIVED"

        try:
            # Connect to server and send data - note that "relex" below is the way
            # to get to the server running in another Docker container.
            # See: docker_compose.yml
            relex_socket.connect(("relex", 9000))

            # The relex protocol expects "text: " at the beginning and "\n" at the end.
            relex_sentence = "text: " + sentence + "\n"
            relex_socket.sendall(relex_sentence.encode('utf-8'))

            # Read the first chunk of the reply.
            reply = relex_socket.recv(1024)

            # The reply starts with a length line; strip it off.
            if b'\n' in reply:
                length_string, reply = reply.split(b'\n', 1)
                # NOTE(review): assumes the advertised length covers the length
                # line itself (minus its trailing newline) -- confirm against
                # the relex wire protocol.
                bytes_remaining = int(length_string) - len(length_string)

                # recv() may return fewer bytes than requested, so loop until the
                # full reply has arrived (the old single extra recv could truncate).
                while len(reply) < bytes_remaining:
                    chunk = relex_socket.recv(bytes_remaining - len(reply))
                    if not chunk:
                        break  # server closed the connection early
                    reply += chunk

            # Decode: the rest of the system expects unicode strings, not bytes.
            # Assigning only after a full decode also guarantees a timeout never
            # leaks a partial bytes object to the caller.
            received_message = reply.decode('utf-8')

        except socket.timeout:
            # Use the module logger instead of print() so the failure is recorded.
            logger.warning("Relex socket timed out")

        finally:
            relex_socket.close()

        return received_message

    def perform(self, job: JobDescriptor):
        """Parse each job item's 'sentence' with the relex server.

        Returns:
            A list with one {'relex_parse': ...} dict per job item.

        Raises:
            RuntimeError: if a job item is malformed or lacks a 'sentence'.
        """
        logger.debug("Performing Relex parse job.")

        # Process the items in the job. The job may include many different sentences.
        results = []
        for job_item in job:

            # Get the input data for this job item.
            job_data = self.get_attached_job_data(job_item)

            # Check to make sure you have the data required.
            sentence = job_data.get('sentence')
            if sentence is None:
                raise RuntimeError("RelexAdapter - job item 'input_data' missing 'sentence'")

            # Send the sentence to the relex server for parsing.
            parsed_sentence = self.relex_parse_sentence(sentence)

            # Add the job results to our combined results array for all job items.
            single_job_result = {
                'relex_parse': parsed_sentence,
            }
            results.append(single_job_result)

        # Return the list of results that come from appending the results for the
        # individual job items in the job.
        return results






80 changes: 80 additions & 0 deletions agent/examples/simple_adapter/__init__.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,80 @@
#
# simple_adapter/__init__.py - a very simple example service adapter...
#
# Copyright (c) 2017 SingularityNET
#
# Distributed under the MIT software license, see LICENSE file.
#

import logging
from typing import List

from sn_agent.job.job_descriptor import JobDescriptor
from sn_agent.service_adapter import ServiceAdapterABC
from sn_agent.ontology import Service
from sn_agent.service_adapter import ServiceAdapterABC, ServiceManager

logger = logging.getLogger(__name__)


class SimpleAdapter(ServiceAdapterABC):
    """A minimal example adapter: drops each job item's text into a fixed template."""

    type_name = "SimpleAdapter"

    def __init__(self, app, service: Service, required_services: List[Service]) -> None:
        super().__init__(app, service, required_services)

        # Filled in during post_load_initialize().
        self.response_template = None

    def post_load_initialize(self, service_manager: ServiceManager):
        super().post_load_initialize(service_manager)

        # The single piece of state this example adapter needs.
        self.response_template = "This AI takes the input and places it at the end: '{0}'."

    def get_attached_job_data(self, job_item: dict) -> dict:
        """Return the attached input data of *job_item*, validating it first.

        Raises:
            RuntimeError: if 'input_type' is not 'attached' or 'input_data' is None.
        """
        # Only 'attached' job items are supported by this adapter.
        if job_item['input_type'] != 'attached':
            logger.error("BAD input dict %s", str(job_item))
            raise RuntimeError("SimpleAdapter - job item 'input_type' must be 'attached'.")

        payload = job_item['input_data']
        if payload is None:
            raise RuntimeError("SimpleAdapter - job item 'input_data' must be defined.")
        return payload

    def perform(self, job: JobDescriptor):
        """Format every job item's 'simple_text' through the response template.

        Returns:
            A list with one {'simple_sentence': ...} dict per job item.

        Raises:
            RuntimeError: if a job item is malformed or lacks 'simple_text'.
        """
        logger.debug("Performing SimpleAdapter job.")

        results = []
        for item in job:
            # Validate and extract this item's input payload.
            payload = self.get_attached_job_data(item)

            text = payload.get('simple_text')
            if text is None:
                raise RuntimeError("SimpleAdapter - job item 'input_data' missing 'simple_text'")

            # The "work": substitute the text into the template, one result per item.
            results.append({'simple_sentence': self.response_template.format(text)})

        # One combined list covering every item in the job.
        return results
File renamed without changes.
1 change: 1 addition & 0 deletions agent/requirements.txt
Original file line number Diff line number Diff line change
Expand Up @@ -28,3 +28,4 @@ bson
tensorflow

web3
requests
21 changes: 20 additions & 1 deletion agent/sn_agent/ontology/ontology_mock.yml
Original file line number Diff line number Diff line change
Expand Up @@ -51,8 +51,27 @@ services:
name: JSON RPC Service Two
description: Sample JSON RPC service number two


# singnet/examples/tensorflow_mnist - an example Tensorflow MNIST classifier
- service:
ontology_node_id: deadbeef-aaaa-bbbb-cccc-111111111101
name: Tensorflow MNIST Classifier
description: Classifies MNIST input handwritten number images

# singnet/examples/agent_simple - a simple example agent to show how to integrate AI code
- service:
ontology_node_id: deadbeef-aaaa-bbbb-cccc-000000000101
name: Agent Simple
description: Example agent that inserts text into a template.

# singnet/examples/relex - an adapter for the relex OpenCog narrow AI
- service:
ontology_node_id: deadbeef-aaaa-bbbb-cccc-000000000202
name: Relex Parser
description: An English-language semantic dependency relationship extractor, built on the Carnegie-Mellon Link Grammar parser.

# singnet/agent/adapters/aigents - an agent proxy to Aigents services
- service:
ontology_node_id: deadbeef-aaaa-bbbb-cccc-000000000000
name: Aigents Services
description: Aigents services

Loading

0 comments on commit b344a3f

Please sign in to comment.