diff --git a/.gitignore b/.gitignore
index c556d5b3..982a3bfb 100644
--- a/.gitignore
+++ b/.gitignore
@@ -390,3 +390,4 @@ FodyWeavers.xsd
 /src/AnalysisLayer/CodeProject.SenseAI.AnalysisLayer.Yolo/assets/yolov5m.onnx
 /Installers/downloads
 *.DS_Store
+/src/AnalysisLayer/BackgroundRemover/models
diff --git a/.vscode/tasks.json b/.vscode/tasks.json
index fc41182d..c2786140 100644
--- a/.vscode/tasks.json
+++ b/.vscode/tasks.json
@@ -31,6 +31,20 @@
             "problemMatcher": "$msCompile"
         },
+        {
+            "label": "build-portraitfilter", // Builds ONLY the PortraitFilter .NET analysis module
+            "type": "process",
+            "group": "build",
+            "command": "dotnet",
+            "args": [
+                "build",
+                "${workspaceFolder}/src/AnalysisLayer/PortraitFilter",
+                "/property:GenerateFullPaths=true",
+                "/consoleloggerparameters:NoSummary"
+            ],
+            "problemMatcher": "$msCompile"
+        },
+
         {
             "label": "build-playground", // Builds ONLY the Yolo .NET analysis module
             "type": "process",
@@ -143,6 +157,7 @@
             "dependsOn": [
                 "build-common",
                 "build-yolo",
+                "build-portraitfilter",
                 "build-server"
             ]
         },
@@ -194,6 +209,7 @@
             "dependsOrder": "sequence",
             "dependsOn": [
                 "build-yolo",
+                "build-portraitfilter",
                 "build-server",
                 "start-api"
             ]
@@ -205,6 +221,7 @@
             "dependsOrder": "sequence",
             "dependsOn": [
                 "build-yolo",
+                "build-portraitfilter",
                 "build-server",
                 "launch-analysis-linux"
             ]
@@ -218,6 +235,7 @@
             "dependsOn": [
                 "build-common",
                 "build-yolo",
+                "build-portraitfilter",
                 "build-server",
                 "build-playground"
             ]
diff --git a/CodeProject.SenseAI.sln b/CodeProject.SenseAI.sln
index a7642e92..77a1317d 100644
--- a/CodeProject.SenseAI.sln
+++ b/CodeProject.SenseAI.sln
@@ -5,6 +5,7 @@ VisualStudioVersion = 17.1.32210.238
 MinimumVisualStudioVersion = 10.0.40219.1
 Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "src", "src", "{A8B76501-496A-4011-9C37-8308A1EBDFA7}"
 	ProjectSection(SolutionItems) = preProject
+		src\clean.bat = src\clean.bat
 		src\start.bat = src\start.bat
 		src\start.sh = src\start.sh
 	EndProjectSection
@@ -35,8 +36,6 @@ Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "AnalysisLayer", "AnalysisLa
 EndProject
 Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Installers", "Installers", "{D885EE64-C1BD-44D6-84D8-1E46806298D9}"
 EndProject
-Project("{888888A0-9F3D-457C-B088-3A5042F75D52}") = "DeepStack", "src\AnalysisLayer\DeepStack\intelligencelayer\DeepStack.pyproj", "{E5D27495-EE4F-4AAF-8749-A6BA848111E2}"
-EndProject
 Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Javascript", "Javascript", "{3A860CDD-94B9-4002-BA08-87E8822DDE50}"
 	ProjectSection(SolutionItems) = preProject
 		demos\Javascript\Vision.html = demos\Javascript\Vision.html
 	EndProjectSection
@@ -70,7 +69,6 @@ Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "TestData", "TestData", "{B1
 		demos\TestData\pexels-pixabay-273935.jpg = demos\TestData\pexels-pixabay-273935.jpg
 		demos\TestData\pexels-polina-tankilevitch-5848781.jpg = demos\TestData\pexels-polina-tankilevitch-5848781.jpg
 		demos\TestData\pexels-roberto-nickson-2559941.jpg = demos\TestData\pexels-roberto-nickson-2559941.jpg
-		demos\TestData\pexels-thirdman-7268587 (1).jpg = demos\TestData\pexels-thirdman-7268587 (1).jpg
 		demos\TestData\pexels-thirdman-7652055.jpg = demos\TestData\pexels-thirdman-7652055.jpg
 		demos\TestData\pexels-tiger-lily-4480988.jpg = demos\TestData\pexels-tiger-lily-4480988.jpg
 		demos\TestData\pexels-tiger-lily-4481324.jpg = demos\TestData\pexels-tiger-lily-4481324.jpg
@@ -94,6 +92,8 @@ EndProject
 Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Frontend", "src\API\Server\FrontEnd\Frontend.csproj", "{A0CF3BEE-8EE3-4B23-BEB8-9D258C0510A3}"
ProjectSection(ProjectDependencies) = postProject {232710A8-9180-4139-8FF2-8F21F649D927} = {232710A8-9180-4139-8FF2-8F21F649D927} + {7DC0A312-39AD-41B0-BB53-72ACACC76959} = {7DC0A312-39AD-41B0-BB53-72ACACC76959} + {AB47CC99-15ED-4928-A2F2-DC8F83DA9482} = {AB47CC99-15ED-4928-A2F2-DC8F83DA9482} EndProjectSection EndProject Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Backend", "src\API\Server\Backend\Backend.csproj", "{C93C22D7-4EB2-4EC0-A7F0-FBCFB9F6F72D}" @@ -132,7 +132,7 @@ Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Solution Items", "Solution global.json = global.json EndProjectSection EndProject -Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "CodeProject.SenseAI.AnalysisLayer.Yolo", "src\AnalysisLayer\CodeProject.SenseAI.AnalysisLayer.Yolo\CodeProject.SenseAI.AnalysisLayer.Yolo.csproj", "{232710A8-9180-4139-8FF2-8F21F649D927}" +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "ObjectDetector", "src\AnalysisLayer\CodeProject.SenseAI.AnalysisLayer.Yolo\ObjectDetector.csproj", "{232710A8-9180-4139-8FF2-8F21F649D927}" EndProject Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "SDK", "SDK", "{FF0C329F-41E8-4540-BCDB-97690911077D}" EndProject @@ -162,9 +162,9 @@ Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Common", "Common", "{5F1052 EndProject Project("{930C7802-8A8C-48F9-8165-68863BCCD9DD}") = "SenseAI.BootStrapper", "Installers\Windows\SenseAI.BootStrapper\SenseAI.BootStrapper.wixproj", "{C04BBD0D-FD36-4FA4-805B-106BCCD9BC79}" ProjectSection(ProjectDependencies) = postProject - {ED492608-6013-4552-A29B-A7E14F4BEB0B} = {ED492608-6013-4552-A29B-A7E14F4BEB0B} - {D0B74BE9-8195-4907-AEEE-45631E37251F} = {D0B74BE9-8195-4907-AEEE-45631E37251F} {3F34EAEE-8A53-40D1-8CDB-A13AE728FA06} = {3F34EAEE-8A53-40D1-8CDB-A13AE728FA06} + {D0B74BE9-8195-4907-AEEE-45631E37251F} = {D0B74BE9-8195-4907-AEEE-45631E37251F} + {ED492608-6013-4552-A29B-A7E14F4BEB0B} = {ED492608-6013-4552-A29B-A7E14F4BEB0B} EndProjectSection EndProject Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Old School Scripts", "Old School Scripts", "{FDB25B74-D0CD-4665-9152-DE968A1028D8}" @@ -208,6 +208,38 @@ Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Docker", "Docker", "{FB0561 EndProject Project("{888888A0-9F3D-457C-B088-3A5042F75D52}") = "TextSummary", "src\AnalysisLayer\TextSummary\TextSummary.pyproj", "{470D3417-36A4-49A4-B719-496466FA92FB}" EndProject +Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Articles", "Articles", "{2E9D03C1-BF8F-4E3D-944A-0DDEDDE620C5}" + ProjectSection(SolutionItems) = preProject + docs\Articles\adding-a-module.md = docs\Articles\adding-a-module.md + docs\Articles\adding-a-net6-module.md = docs\Articles\adding-a-net6-module.md + docs\Articles\dog_and_man_rembg.png = docs\Articles\dog_and_man_rembg.png + docs\Articles\How-to-add-AI-to-an-app.md = docs\Articles\How-to-add-AI-to-an-app.md + docs\Articles\pexels-thirdman-7268587.jpg = docs\Articles\pexels-thirdman-7268587.jpg + docs\Articles\test.html.jpg = docs\Articles\test.html.jpg + EndProjectSection +EndProject +Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Python", "Python", "{D2B223B6-983B-4782-B231-5F481504A86F}" + ProjectSection(SolutionItems) = preProject + src\AnalysisLayer\SDK\Python\requirements.txt = src\AnalysisLayer\SDK\Python\requirements.txt + src\AnalysisLayer\SDK\Python\senseAI.py = src\AnalysisLayer\SDK\Python\senseAI.py + EndProjectSection +EndProject +Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "NET", "NET", "{11F66210-D711-4A0A-AE3A-8AC11432CDC3}" +EndProject 
+Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "PortraitFilter", "src\AnalysisLayer\PortraitFilter\PortraitFilter.csproj", "{AB47CC99-15ED-4928-A2F2-DC8F83DA9482}" +EndProject +Project("{888888A0-9F3D-457C-B088-3A5042F75D52}") = "BackgroundRemover", "src\AnalysisLayer\BackgroundRemover\BackgroundRemover.pyproj", "{470D3417-36A4-49A4-B719-496466FA92FC}" +EndProject +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "CodeProject.SenseAI.AnalysisLayer.SDK", "src\AnalysisLayer\SDK\NET\CodeProject.SenseAI.AnalysisLayer.SDK\CodeProject.SenseAI.AnalysisLayer.SDK.csproj", "{56DFAAD2-2E14-45D8-8BC9-D171785235E9}" +EndProject +Project("{888888A0-9F3D-457C-B088-3A5042F75D52}") = "Vision", "src\AnalysisLayer\DeepStack\intelligencelayer\Vision.pyproj", "{E5D27495-EE4F-4AAF-8749-A6BA848111E2}" +EndProject +Project("{930C7802-8A8C-48F9-8165-68863BCCD9DD}") = "BackgroundRemover.Installer", "Installers\Windows\BackgroundRemover.Installer\BackgroundRemover.Installer.wixproj", "{7E70C376-7119-4C90-AC30-344EEA29594E}" +EndProject +Project("{930C7802-8A8C-48F9-8165-68863BCCD9DD}") = "PortraitFilter.Installer", "Installers\Windows\PortraitFilter.Installer\PortraitFilter.Installer.wixproj", "{476475CE-3C81-446B-914E-AC472D41B82A}" +EndProject +Project("{930C7802-8A8C-48F9-8165-68863BCCD9DD}") = "Python39.Installer", "Installers\Windows\Python39.Installer\Python39.Installer.wixproj", "{0DBC85BC-52C3-491F-90F9-6D728FFA2E8F}" +EndProject Global GlobalSection(SolutionConfigurationPlatforms) = preSolution Debug|Any CPU = Debug|Any CPU @@ -220,10 +252,6 @@ Global {25750BF1-1502-4F65-8D69-CEA8C87D6446}.Debug|x86.ActiveCfg = Debug|Any CPU {25750BF1-1502-4F65-8D69-CEA8C87D6446}.Release|Any CPU.ActiveCfg = Release|Any CPU {25750BF1-1502-4F65-8D69-CEA8C87D6446}.Release|x86.ActiveCfg = Release|Any CPU - {E5D27495-EE4F-4AAF-8749-A6BA848111E2}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {E5D27495-EE4F-4AAF-8749-A6BA848111E2}.Debug|x86.ActiveCfg = Debug|Any CPU - {E5D27495-EE4F-4AAF-8749-A6BA848111E2}.Release|Any CPU.ActiveCfg = Release|Any CPU - {E5D27495-EE4F-4AAF-8749-A6BA848111E2}.Release|x86.ActiveCfg = Release|Any CPU {C33D90E7-7570-46FB-9EB9-ED6B40A93A9B}.Debug|Any CPU.ActiveCfg = Debug|Any CPU {C33D90E7-7570-46FB-9EB9-ED6B40A93A9B}.Debug|Any CPU.Build.0 = Debug|Any CPU {C33D90E7-7570-46FB-9EB9-ED6B40A93A9B}.Debug|x86.ActiveCfg = Debug|Any CPU @@ -328,6 +356,54 @@ Global {470D3417-36A4-49A4-B719-496466FA92FB}.Debug|x86.ActiveCfg = Debug|Any CPU {470D3417-36A4-49A4-B719-496466FA92FB}.Release|Any CPU.ActiveCfg = Release|Any CPU {470D3417-36A4-49A4-B719-496466FA92FB}.Release|x86.ActiveCfg = Release|Any CPU + {AB47CC99-15ED-4928-A2F2-DC8F83DA9482}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {AB47CC99-15ED-4928-A2F2-DC8F83DA9482}.Debug|Any CPU.Build.0 = Debug|Any CPU + {AB47CC99-15ED-4928-A2F2-DC8F83DA9482}.Debug|x86.ActiveCfg = Debug|Any CPU + {AB47CC99-15ED-4928-A2F2-DC8F83DA9482}.Debug|x86.Build.0 = Debug|Any CPU + {AB47CC99-15ED-4928-A2F2-DC8F83DA9482}.Release|Any CPU.ActiveCfg = Release|Any CPU + {AB47CC99-15ED-4928-A2F2-DC8F83DA9482}.Release|Any CPU.Build.0 = Release|Any CPU + {AB47CC99-15ED-4928-A2F2-DC8F83DA9482}.Release|x86.ActiveCfg = Release|Any CPU + {AB47CC99-15ED-4928-A2F2-DC8F83DA9482}.Release|x86.Build.0 = Release|Any CPU + {470D3417-36A4-49A4-B719-496466FA92FC}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {470D3417-36A4-49A4-B719-496466FA92FC}.Debug|x86.ActiveCfg = Debug|Any CPU + {470D3417-36A4-49A4-B719-496466FA92FC}.Release|Any CPU.ActiveCfg = Release|Any CPU + 
{470D3417-36A4-49A4-B719-496466FA92FC}.Release|x86.ActiveCfg = Release|Any CPU + {56DFAAD2-2E14-45D8-8BC9-D171785235E9}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {56DFAAD2-2E14-45D8-8BC9-D171785235E9}.Debug|Any CPU.Build.0 = Debug|Any CPU + {56DFAAD2-2E14-45D8-8BC9-D171785235E9}.Debug|x86.ActiveCfg = Debug|Any CPU + {56DFAAD2-2E14-45D8-8BC9-D171785235E9}.Debug|x86.Build.0 = Debug|Any CPU + {56DFAAD2-2E14-45D8-8BC9-D171785235E9}.Release|Any CPU.ActiveCfg = Release|Any CPU + {56DFAAD2-2E14-45D8-8BC9-D171785235E9}.Release|Any CPU.Build.0 = Release|Any CPU + {56DFAAD2-2E14-45D8-8BC9-D171785235E9}.Release|x86.ActiveCfg = Release|Any CPU + {56DFAAD2-2E14-45D8-8BC9-D171785235E9}.Release|x86.Build.0 = Release|Any CPU + {E5D27495-EE4F-4AAF-8749-A6BA848111E2}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {E5D27495-EE4F-4AAF-8749-A6BA848111E2}.Debug|x86.ActiveCfg = Debug|Any CPU + {E5D27495-EE4F-4AAF-8749-A6BA848111E2}.Release|Any CPU.ActiveCfg = Release|Any CPU + {E5D27495-EE4F-4AAF-8749-A6BA848111E2}.Release|x86.ActiveCfg = Release|Any CPU + {7E70C376-7119-4C90-AC30-344EEA29594E}.Debug|Any CPU.ActiveCfg = Debug|x86 + {7E70C376-7119-4C90-AC30-344EEA29594E}.Debug|Any CPU.Build.0 = Debug|x86 + {7E70C376-7119-4C90-AC30-344EEA29594E}.Debug|x86.ActiveCfg = Debug|x86 + {7E70C376-7119-4C90-AC30-344EEA29594E}.Debug|x86.Build.0 = Debug|x86 + {7E70C376-7119-4C90-AC30-344EEA29594E}.Release|Any CPU.ActiveCfg = Release|x86 + {7E70C376-7119-4C90-AC30-344EEA29594E}.Release|Any CPU.Build.0 = Release|x86 + {7E70C376-7119-4C90-AC30-344EEA29594E}.Release|x86.ActiveCfg = Release|x86 + {7E70C376-7119-4C90-AC30-344EEA29594E}.Release|x86.Build.0 = Release|x86 + {476475CE-3C81-446B-914E-AC472D41B82A}.Debug|Any CPU.ActiveCfg = Debug|x86 + {476475CE-3C81-446B-914E-AC472D41B82A}.Debug|Any CPU.Build.0 = Debug|x86 + {476475CE-3C81-446B-914E-AC472D41B82A}.Debug|x86.ActiveCfg = Debug|x86 + {476475CE-3C81-446B-914E-AC472D41B82A}.Debug|x86.Build.0 = Debug|x86 + {476475CE-3C81-446B-914E-AC472D41B82A}.Release|Any CPU.ActiveCfg = Release|x86 + {476475CE-3C81-446B-914E-AC472D41B82A}.Release|Any CPU.Build.0 = Release|x86 + {476475CE-3C81-446B-914E-AC472D41B82A}.Release|x86.ActiveCfg = Release|x86 + {476475CE-3C81-446B-914E-AC472D41B82A}.Release|x86.Build.0 = Release|x86 + {0DBC85BC-52C3-491F-90F9-6D728FFA2E8F}.Debug|Any CPU.ActiveCfg = Debug|x86 + {0DBC85BC-52C3-491F-90F9-6D728FFA2E8F}.Debug|Any CPU.Build.0 = Debug|x86 + {0DBC85BC-52C3-491F-90F9-6D728FFA2E8F}.Debug|x86.ActiveCfg = Debug|x86 + {0DBC85BC-52C3-491F-90F9-6D728FFA2E8F}.Debug|x86.Build.0 = Debug|x86 + {0DBC85BC-52C3-491F-90F9-6D728FFA2E8F}.Release|Any CPU.ActiveCfg = Release|x86 + {0DBC85BC-52C3-491F-90F9-6D728FFA2E8F}.Release|Any CPU.Build.0 = Release|x86 + {0DBC85BC-52C3-491F-90F9-6D728FFA2E8F}.Release|x86.ActiveCfg = Release|x86 + {0DBC85BC-52C3-491F-90F9-6D728FFA2E8F}.Release|x86.Build.0 = Release|x86 EndGlobalSection GlobalSection(SolutionProperties) = preSolution HideSolutionNode = FALSE @@ -336,7 +412,6 @@ Global {25750BF1-1502-4F65-8D69-CEA8C87D6446} = {7F18EB64-C857-49C4-9380-70D3CCE6242B} {2379A486-0D28-4CAD-BB13-E77FBA538E0D} = {A8B76501-496A-4011-9C37-8308A1EBDFA7} {156BFEDA-D477-43B2-92DA-FCC9BAF1F893} = {A8B76501-496A-4011-9C37-8308A1EBDFA7} - {E5D27495-EE4F-4AAF-8749-A6BA848111E2} = {156BFEDA-D477-43B2-92DA-FCC9BAF1F893} {3A860CDD-94B9-4002-BA08-87E8822DDE50} = {7F18EB64-C857-49C4-9380-70D3CCE6242B} {1912618E-7942-4BB0-BC2B-7F48A38D1049} = {7F18EB64-C857-49C4-9380-70D3CCE6242B} {B10B59B5-9F63-41C2-BFBB-6C7311DC4E99} = {7F18EB64-C857-49C4-9380-70D3CCE6242B} @@ 
-367,6 +442,16 @@ Global {F675BEA0-4A75-4B30-9E70-CBBE8641E9CD} = {83C828B9-2B1E-4982-B4B7-69D173DFBB27} {FB0561D3-4AF8-415A-85B4-E4E9ADDC3DB2} = {D885EE64-C1BD-44D6-84D8-1E46806298D9} {470D3417-36A4-49A4-B719-496466FA92FB} = {156BFEDA-D477-43B2-92DA-FCC9BAF1F893} + {2E9D03C1-BF8F-4E3D-944A-0DDEDDE620C5} = {3BF27572-9D3A-497C-8375-29110EBBD3D1} + {D2B223B6-983B-4782-B231-5F481504A86F} = {FF0C329F-41E8-4540-BCDB-97690911077D} + {11F66210-D711-4A0A-AE3A-8AC11432CDC3} = {FF0C329F-41E8-4540-BCDB-97690911077D} + {AB47CC99-15ED-4928-A2F2-DC8F83DA9482} = {156BFEDA-D477-43B2-92DA-FCC9BAF1F893} + {470D3417-36A4-49A4-B719-496466FA92FC} = {156BFEDA-D477-43B2-92DA-FCC9BAF1F893} + {56DFAAD2-2E14-45D8-8BC9-D171785235E9} = {11F66210-D711-4A0A-AE3A-8AC11432CDC3} + {E5D27495-EE4F-4AAF-8749-A6BA848111E2} = {156BFEDA-D477-43B2-92DA-FCC9BAF1F893} + {7E70C376-7119-4C90-AC30-344EEA29594E} = {83C828B9-2B1E-4982-B4B7-69D173DFBB27} + {476475CE-3C81-446B-914E-AC472D41B82A} = {83C828B9-2B1E-4982-B4B7-69D173DFBB27} + {0DBC85BC-52C3-491F-90F9-6D728FFA2E8F} = {83C828B9-2B1E-4982-B4B7-69D173DFBB27} EndGlobalSection GlobalSection(ExtensibilityGlobals) = postSolution SolutionGuid = {83740BD9-AEEF-49C7-A722-D7703D3A38CB} diff --git a/Installers/Dev/setup_dev_env_linux.sh b/Installers/Dev/setup_dev_env_linux.sh index f743f621..07569b7c 100644 --- a/Installers/Dev/setup_dev_env_linux.sh +++ b/Installers/Dev/setup_dev_env_linux.sh @@ -183,7 +183,7 @@ function isDarkMode () { # uses the system default # string Background color name. Optional. Defaults to $color_background which is set based on the # current terminal background -function WriteLine () { +function writeLine () { local resetColor='\033[0m' @@ -212,7 +212,7 @@ function WriteLine () { # uses the system default # string Background color name. Optional. Defaults to $color_background which is set based on the # current terminal background -function Write () { +function write () { local resetColor='\033[0m' @@ -241,37 +241,408 @@ function checkForTool () { return fi - WriteLine - WriteLine - WriteLine "------------------------------------------------------------------------" - WriteLine "Error: ${name} is not installed on your system" $color_error + writeLine + writeLine + writeLine "------------------------------------------------------------------------" + writeLine "Error: ${name} is not installed on your system" $color_error - if [ "$platform" == "osx" ]; then - WriteLine " Please run 'brew install ${name}'" $color_error + if [ "$platform" == "macos" ]; then + writeLine " Please run 'brew install ${name}'" $color_error if ! 
command -v brew &> /dev/null; then - WriteLine - WriteLine "Error: It looks like you don't have brew installed either" $color_warn - WriteLine " Please run:" $color_warn - WriteLine " /bin/bash -c '$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/HEAD/install.sh)'" $color_warn + writeLine + writeLine "Error: It looks like you don't have brew installed either" $color_warn + writeLine " Please run:" $color_warn + writeLine " /bin/bash -c '$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/HEAD/install.sh)'" $color_warn quit fi else - WriteLine " Please run 'sudo apt install ${name}'" $color_error + writeLine " Please run 'sudo apt install ${name}'" $color_error fi - WriteLine - WriteLine + writeLine + writeLine quit } +function setupPython () { + + # M1 macs are trouble for python + if [ "$platform" == "macos" ] && [[ $(uname -p) == 'arm' ]]; then + write "ARM (Apple silicon) Mac detected, but we are not running under Rosetta. " $color_warn + if [ $(/usr/bin/pgrep oahd >/dev/null 2>&1; echo $?) -gt 0 ]; then + #if [ "$(pkgutil --files com.apple.pkg.RosettaUpdateAuto)" == "" ]; then + writeLine 'Rosetta is not installed' $color_error + needRosettaAndiBrew + else + writeLine 'Rosetta is installed. We can continue.' $color_success + fi + fi + + local pythonVersion=$1 + + # Version with ".'s removed + local pythonName="python${pythonVersion/./}" + + installPath="${analysisLayerPath}/bin/${platform}/${pythonName}" + + if [ "${forceOverwrite}" == "true" ]; then + + if [ ! $verbosity == "quiet" ]; then + writeLine "Cleaning download directory to force re-install of Python" $color_info + fi + + # Force Re-download + if [ -d "${downloadPath}/${platform}/${pythonName}" ]; then + rm -rf "${downloadPath}/${platform}/${pythonName}" + fi + + # Force overwrite + if [ -d "${installPath}" ]; then + rm -rf "${installPath}" + fi + fi + + # ============================================================================ + # 1. Install Python. Using deadsnakes for Linux (not macOS), so be aware if you have concerns + # about potential late adoption of security patches. + + if [ $verbosity == "loud" ]; then + writeLine "Python install path is ${installPath}" $color_info + fi + + if [ ! -d "${installPath}" ]; then + if [ "$platform" == "macos" ]; then + mkdir -p "${installPath}" + else + mkdir -p "${installPath}" + fi + fi + + pythonCmd="python${pythonVersion}" + if command -v $pythonCmd &> /dev/null; then + writeLine "Python ${pythonVersion} is already installed" $color_success + else + + # For macOS we'll use brew to install python + if [ "$platform" == "macos" ]; then + + write "Installing Python ${pythonVersion}..." $color_primary + + if [[ $(uname -p) == 'arm' ]]; then + + # Apple silicon requires Rosetta2 for python to run, so use the x86 version of Brew + # we installed earlier + if [ "${verbosity}" == "quiet" ]; then + arch -x86_64 /usr/local/bin/brew install python@${pythonVersion} >/dev/null 2>/dev/null & + spin $! + else + arch -x86_64 /usr/local/bin/brew install python@${pythonVersion} + fi + + # Note that we only need the specific location of the python interpreter to setup the + # virtual environment. 
After it's setup, all python calls are relative to the same venv + # no matter the location of the original python interpreter + pythonCmd="/usr/local/opt/python@${pythonVersion}/bin/python${pythonVersion}" + + else + + # We have a x64 version of python for macOS in our S3 bucket but it's easier simply to + # install python natively + + # Download $storageUrl $downloadPath "python3.7.12-osx64.tar.gz" "${platform}/${pythonDir}" "Downloading Python interpreter..." + # cp -R "${downloadPath}/${platform}/${pythonDir}" "${analysisLayerPath}/bin/${platform}" + + if [ "${verbosity}" == "quiet" ]; then + brew install python@${pythonVersion} >/dev/null 2>/dev/null & + spin $! + else + brew install python@${pythonVersion} + fi + + # Brew specific path + pythonCmd="/usr/local/opt/python@${pythonVersion}/bin/python${pythonVersion}" + + fi + + writeLine "Done" $color_success + + # For Linux we'll use apt-get the deadsnakes PPA to get the old version of python. Deadsnakes? + # Old python? Get it? Get it?! And who said developers have no sense of humour. + else + + if [ ! "${verbosity}" == "loud" ]; then + + write "Installing Python ${pythonVersion}..." $color_primary + + if [ "${verbosity}" == "info" ]; then writeLine "Updating apt-get" $color_info; fi; + apt-get update -y >/dev/null 2>/dev/null & + spin $! + + if [ "${verbosity}" == "info" ]; then writeLine "Installing software-properties-common" $color_info; fi; + apt install software-properties-common -y >/dev/null 2>/dev/null & + spin $! + + if [ "${verbosity}" == "info" ]; then writeLine "Adding deadsnakes as a Python install source (PPA)" $color_info; fi; + add-apt-repository ppa:deadsnakes/ppa -y >/dev/null 2>/dev/null & + spin $! + + if [ "${verbosity}" == "info" ]; then writeLine "Updating apt" $color_info; fi; + apt update -y >/dev/null 2>/dev/null & + spin $! + + if [ "${verbosity}" == "info" ]; then writeLine "Installing Python ${pythonVersion}" $color_info; fi; + apt-get install python${pythonVersion} -y >/dev/null 2>/dev/null & + spin $! + + # apt-get install python3-pip + writeLine "Done" $color_success + else + writeLine "Updating apt-get" $color_info + apt-get update -y + writeLine "Installing software-properties-common" $color_info + apt install software-properties-common -y + writeLine "Adding deadsnakes as a Python install source (PPA)" $color_info + add-apt-repository ppa:deadsnakes/ppa -y + writeLine "Updating apt" $color_info + apt update -y + writeLine "Installing Python ${pythonVersion}" $color_primary + apt-get install python${pythonVersion} -y + # apt-get install python3-pip + writeLine "Done" $color_success + fi + fi + fi + + # ============================================================================ + # 2. Create Virtual Environment + + if [ -d "${installPath}/venv" ]; then + writeLine "Virtual Environment already present" $color_success + else + + # Make sure we have pythonNN-env installed + if [ "$platform" == "macos" ]; then + if [ "${verbosity}" == "quiet" ]; then + write "Installing Virtual Environment tools for mac..." $color_primary + pip3 install virtualenv virtualenvwrapper >/dev/null 2>/dev/null & + spin $! + writeLine "Done" $color_success + else + writeLine "Installing Virtual Environment tools for mac..." $color_primary + + # regarding the warning: See https://github.com/Homebrew/homebrew-core/issues/76621 + if [ "$platform" == "macos" ] && [ $(versionCompare "${pythonVersion}" '3.10.2') == "-1" ]; then + writeLine "Ignore the DEPRECATION warning. 
See https://github.com/Homebrew/homebrew-core/issues/76621 for details" $color_info + fi + + pip3 install virtualenv virtualenvwrapper + fi + else + if [ "${verbosity}" == "quiet" ]; then + write "Installing Virtual Environment tools for Linux..." $color_primary + + # just in case - but doesn't seem to be effective + # writeLine + # writeLine "First: correcting broken installs". + # apt --fix-broken install + + apt install python${pythonVersion}-venv >/dev/null 2>/dev/null & + spin $! + writeLine "Done" $color_success + else + writeLine "Installing Virtual Environment tools for Linux..." $color_primary + apt install python${pythonVersion}-venv + fi + fi + + # Create the virtual env + write "Creating Virtual Environment..." $color_primary + + if [ $verbosity == "loud" ]; then + writeLine "Install path is ${installPath}" + fi + + if [ "$platform" == "macos" ]; then + ${pythonCmd} -m venv "${installPath}/venv" + else + ${pythonCmd} -m venv "${installPath}/venv" & + spin $! # process ID of the unzip/tar call + fi + writeLine "Done" $color_success + fi + + pushd "${installPath}" >/dev/null + venvPath="$(pwd)/venv" + pythonInterpreterPath="${venvPath}/bin/python3" + popd >/dev/null + + # Ensure Python Exists + write "Checking for Python ${pythonVersion}..." $color_primary + pyVersion=$($pythonInterpreterPath --version) + write "Found ${pyVersion}. " $color_mute + + echo $pyVersion | grep "${pythonVersion}" >/dev/null + if [ $? -ne 0 ]; then + errorNoPython + fi + writeLine "present" $color_success +} + +function installPythonPackages () { + + # Whether or not to install all python packages in one step (-r requirements.txt) or step by step + oneStepPIP="true" + + pythonVersion=$1 + # Version with ".'s removed + local pythonName="python${pythonVersion/./}" + + pythonCmd="python${pythonVersion}" + + # Brew doesn't set PATH by default (nor do we need it to) which means we just have to be careful + if [ "$platform" == "macos" ]; then + + # If running "PythonX.Y" doesn't actually work, then let's adjust the python command + # to point to where we think the python launcher should be + python${pythonVersion} --version >/dev/null 2>/dev/null + if [ $? -ne 0 ]; then + # writeLine "Did not find python in default location" + pythonCmd="/usr/local/opt/python@${pythonVersion}/bin/python${pythonVersion}" + fi + fi + + # Quick check to ensure PIP is upo to date + if [ "${verbosity}" == "quiet" ]; then + write "Updating Python PIP..." + ${pythonCmd} -m pip install --upgrade pip >/dev/null 2>/dev/null & + spin $! + writeLine "Done" $color_success + else + writeLine "Updating Python PIP..." + # regarding the warning: See https://github.com/Homebrew/homebrew-core/issues/76621 + if [ "$platform" == "macos" ] && [ $(versionCompare "${pythonVersion}" '3.10.2') == "-1" ]; then + writeLine "Ignore the DEPRECATION warning. See https://github.com/Homebrew/homebrew-core/issues/76621 for details" $color_info + fi + ${pythonCmd} -m pip install --upgrade pip + fi + + requirementsPath=$2 + + testForPipExistanceName=$3 + + virtualEnv="${analysisLayerPath}/bin/${platform}/${pythonName}/venv" + + # ============================================================================ + # Install PIP packages + + write "Checking for required packages..." $color_primary + + # ASSUMPTION: If a folder by the name of "testForPipExistanceName" exists in the site-packages + # directory then we assume the requirements.txt file has already been processed. + + packagesPath="${virtualEnv}/lib/python${pythonVersion}/site-packages/" + + if [ ! 
-d "${packagesPath}/${testForPipExistanceName}" ]; then + + if [ ! "${verbosity}" == "quiet" ]; then + writeLine "Installing packages from ${requirementsPath}" $color_info + fi + writeLine "Packages missing. Installing..." $color_info + + if [ "${oneStepPIP}" == "true" ]; then + + # Install the Python Packages in one fell swoop. Not much feedback, but it works + write "Installing Packages into Virtual Environment..." $color_primary + if [ "${verbosity}" == "quiet" ]; then + ${pythonCmd} -m pip install -r ${requirementsPath} --target ${packagesPath} > /dev/null & + spin $! + else + ${pythonCmd} -m pip install -r ${requirementsPath} --target ${packagesPath} + fi + writeLine "Success" $color_success + + else + + # Open requirements.txt and grab each line. We need to be careful with --find-links lines + # as this doesn't currently work in Linux + currentOption="" + + IFS=$'\n' # set the Internal Field Separator as end of line + cat "${requirementsPath}" | while read -r line + do + + line="$(echo $line | tr -d '\r\n')" # trim newlines / CRs + + if [ "${line}" == "" ]; then + currentOption="" + elif [ "${line:0:2}" == "##" ]; then + currentOption="" + elif [ "${line:0:2}" == "#!" ]; then + currentOption="" + elif [ "${line:0:12}" == "--find-links" ]; then + currentOption="${line}" + else + + module="${line}" + description="" + + # breakup line into module name and description + IFS='#'; tokens=($module); IFS=$'\n'; + + if [ ${#tokens[*]} -gt 1 ]; then + module="${tokens[0]}" + description="${tokens[1]}" + fi + + if [ "${description}" == "" ]; then + description="Installing ${module}" + fi + + if [ "${module}" != "" ]; then + + # Some packages have a version nunber after a "==". We need to trim that here. + IFS='='; tokens=($module); IFS=$'\n'; + if [ ${#tokens[*]} -gt 1 ]; then + module="${tokens[0]}" + fi + currentOption="" # Given that we're stripping versions, ignore this too + + write " -${description}..." $color_primary + + pushd "${virtualEnv}/bin" > /dev/null + if [ "${verbosity}" == "quiet" ]; then + ./python${pythonVersion} -m pip install $module $currentOption $pipFlags >/dev/null 2>/dev/null & + spin $! + else + # echo python3 -m pip install $module $currentOption $pipFlags + ./python${pythonVersion} -m pip install $module $currentOption $pipFlags + fi + popd > /dev/null + + writeLine "Done" $color_success + + fi + + currentOption="" + + fi + + done + unset IFS + fi + else + writeLine "present." $color_success + fi +} + function errorNoPython () { - WriteLine - WriteLine - WriteLine "------------------------------------------------------------------------" $color_primary - WriteLine "Error: Python 3.7 not installed" $color_error - WriteLine - WriteLine + writeLine + writeLine + writeLine "------------------------------------------------------------------------" $color_primary + writeLine "Error: Python not installed" $color_error + writeLine + writeLine quit } @@ -310,7 +681,7 @@ function Download () { # dirToSave = packages if [ "${fileToGet}" == "" ]; then - WriteLine 'No download file was specified' $color_error + writeLine 'No download file was specified' $color_error quit # no point in carrying on fi @@ -318,12 +689,12 @@ function Download () { message="Downloading ${fileToGet}..." 
fi - # WriteLine "Downloading ${fileToGet} to ${downloadToDir}/${dirToSave}" $color_primary + # writeLine "Downloading ${fileToGet} to ${downloadToDir}/${dirToSave}" $color_primary - Write $message $color_primary + write $message $color_primary if [ -d "${downloadToDir}/${dirToSave}" ]; then - WriteLine " Directory already exists" $color_info + writeLine " Directory already exists" $color_info return 0 # This is ok and assumes it's already downloaded. Whether that's true or not... fi @@ -331,7 +702,7 @@ function Download () { if [ ! "${extension}" == ".gz" ]; then extension="${fileToGet:(-4)}" if [ ! "${extension}" == ".zip" ]; then - WriteLine "Unknown and unsupported file type for file ${fileToGet}" $color_error + writeLine "Unknown and unsupported file type for file ${fileToGet}" $color_error quit # no point in carrying on # return 1 @@ -339,13 +710,13 @@ function Download () { fi if [ ! -f "${downloadToDir}/${fileToGet}" ]; then - # WriteLine "Downloading ${fileToGet} to ${dirToSave}.zip in ${downloadToDir}" $color_warn + # writeLine "Downloading ${fileToGet} to ${dirToSave}.zip in ${downloadToDir}" $color_warn wget $wgetFlags --show-progress -O "${downloadToDir}/${fileToGet}" -P "${downloadToDir}" \ "${storageUrl}${fileToGet}" status=$? if [ $status -ne 0 ]; then - WriteLine "The wget command failed for file ${fileToGet}." $color_error + writeLine "The wget command failed for file ${fileToGet}." $color_error quit # no point in carrying on # return 2 @@ -353,13 +724,13 @@ function Download () { fi if [ ! -f "${downloadToDir}/${fileToGet}" ]; then - WriteLine "The downloaded file '${fileToGet}' doesn't appear to exist." $color_error + writeLine "The downloaded file '${fileToGet}' doesn't appear to exist." $color_error quit # no point in carrying on # return 3 fi - Write 'Expanding...' $color_info + write 'Expanding...' $color_info pushd "${downloadToDir}" >/dev/null @@ -376,7 +747,7 @@ function Download () { spin $! # process ID of the unzip/tar call if [[ ! -d "${dirToSave}" ]]; then - WriteLine "Unable to extract download. Can you please check you have write permission to "${dirToSave}"." $color_error + writeLine "Unable to extract download. Can you please check you have write permission to "${dirToSave}"." $color_error quit # no point in carrying on fi @@ -384,7 +755,49 @@ function Download () { # rm /s /f /q "${downloadToDir}/${fileToGet}" >/dev/null - WriteLine 'Done.' $color_success + writeLine 'Done.' $color_success +} + +# Thanks: https://stackoverflow.com/a/4025065 with mods +# compares two version numbers (eg 3.9.12 < 3.10.1) +versionCompare () { + + # trivial equal case + if [[ $1 == $2 ]]; then + echo "0" + return 0 + fi + + local IFS=. 
+ local i ver1=($1) ver2=($2) + + # fill empty fields in ver1 with zeros + for ((i=${#ver1[@]}; i<${#ver2[@]}; i++)) + do + ver1[i]=0 + done + + for ((i=0; i<${#ver1[@]}; i++)) + do + if [[ -z ${ver2[i]} ]] + then + # fill empty fields in ver2 with zeros + ver2[i]=0 + fi + + if ((10#${ver1[i]} > 10#${ver2[i]})) + then + echo "1" # $1 > $2 + return 0 + fi + if ((10#${ver1[i]} < 10#${ver2[i]})) + then + echo "-1" # $1 < $2 + return 0 + fi + done + + echo "0" } function getDisplaySize () { @@ -393,32 +806,30 @@ function getDisplaySize () { } function displayMacOSPermissionError () { - WriteLine - WriteLine "Unable to Create a Directory" $color_error + writeLine + writeLine "Unable to Create a Directory" $color_error if [[ $OSTYPE == 'darwin'* ]]; then local commonDir=$1 - WriteLine - WriteLine "But we may be able to suggest something:" $color_info - - WriteLine '1. Pull down the  Apple menu and choose "System Preferences"' - WriteLine '2. Choose “Security & Privacy” control panel' - WriteLine '3. Now select the “Privacy” tab, then from the left-side menu select' - WriteLine ' “Full Disk Access”' - WriteLine '4. Click the lock icon in the lower left corner of the preference ' - WriteLine ' panel and authenticate with an admin level login' - WriteLine '5. Now click the [+] plus button to add an application with full disk' - WriteLine ' access' - WriteLine '6. Click the Plus button to add Terminal to Full Disk Access in macOS' - WriteLine "7. Navigate to the '${commonDir}' folder and choose 'Terminal'" - WriteLine ' to grant Terminal with Full Disk Access privileges' - WriteLine '8. Select "Terminal app" to grant full disk access in MacOS' - WriteLine '9. Relaunch Terminal, the “Operation not permitted” error messages will' - WriteLine ' be gone' - WriteLine - WriteLine 'Thanks to https://osxdaily.com/2018/10/09/fix-operation-not-permitted-terminal-error-macos/' + writeLine + writeLine "But we may be able to suggest something:" $color_info + + # Note that  will appear as the Apple symbol on macOS, but probably not on Windows or Linux + writeLine '1. Pull down the  Apple menu and choose "System Preferences"' + writeLine '2. Choose “Security & Privacy” control panel' + writeLine '3. Now select the “Privacy” tab, then from the left-side menu select' + writeLine ' “Full Disk Access”' + writeLine '4. Click the lock icon in the lower left corner of the preference ' + writeLine ' panel and authenticate with an admin level login' + writeLine '5. Now click the [+] plus button so we can full disk access to Terminal' + writeLine "6. Navigate to the /Applications/Utilities/ folder and choose 'Terminal'" + writeLine ' to grant Terminal Full Disk Access privileges' + writeLine '7. Relaunch Terminal, the “Operation not permitted” error messages should' + writeLine ' be gone' + writeLine + writeLine 'Thanks to https://osxdaily.com/2018/10/09/fix-operation-not-permitted-terminal-error-macos/' fi quit @@ -426,10 +837,10 @@ function displayMacOSPermissionError () { function needRosettaAndiBrew () { - WriteLine - WriteLine "You're on an Mx Mac running ARM but Python3.7 only works on Intel." $color_error - WriteLine "You will need to install Rosetta2 to continue." $color_error - WriteLine + writeLine + writeLine "You're on an Mx Mac running ARM but Python3 only works on Intel." $color_error + writeLine "You will need to install Rosetta2 to continue." $color_error + writeLine read -p 'Install Rosetta2 (Y/N)?' 
installRosetta if [ "${installRosetta}" == "y" ] || [ "${installRosetta}" == "Y" ]; then /usr/sbin/softwareupdate --install-rosetta --agree-to-license @@ -437,7 +848,7 @@ function needRosettaAndiBrew () { quit fi - WriteLine "Then you need to install brew under Rosetta (We'll alias it as ibrew)" + writeLine "Then you need to install brew under Rosetta (We'll alias it as ibrew)" read -p 'Install brew for x86 (Y/N)?' installiBrew if [ "${installiBrew}" == "y" ] || [ "${installiBrew}" == "Y" ]; then arch -x86_64 /bin/bash -c "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/master/install.sh)" @@ -485,52 +896,29 @@ fi clear # verbosity can be: quiet | info | loud -verbosity="info" +verbosity="quiet" # If files are already present, then don't overwrite if this is false forceOverwrite=false # Platform can define where things are located if [[ $OSTYPE == 'darwin'* ]]; then - platform='osx' + platform='macos' else platform='linux' fi + # Basic locations # The location of the solution root directory relative to this script rootPath='../..' -# SenseAI specific ::::::::::::::::::::::::::::::::::::::::::::::::::::::::::: +# SenseAI Server specific :::::::::::::::::::::::::::::::::::::::::::::::::::: # The name of the dir holding the frontend API server senseAPIDir='API' -# TextSummary specific ::::::::::::::::::::::::::::::::::::::::::::::::::::::: - -textSummaryDir='TextSummary' - -# DeepStack specific ::::::::::::::::::::::::::::::::::::::::::::::::::::::::: - -# The name of the dir holding the DeepStack analysis services -deepstackDir='DeepStack' - -# The name of the dir containing the Python code itself -intelligenceDir='intelligencelayer' - -# The name of the dir containing the AI models themselves -modelsDir='assets' - -# The name of the dir containing persisted DeepStack data -datastoreDir='datastore' - -# The name of the dir containing temporary DeepStack data -tempstoreDir='tempstore' - -# Yolo.Net specific -yoloNetDir='CodeProject.SenseAI.AnalysisLayer.Yolo' -yoloModelsDir='yoloModels' # Shared ::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::: @@ -547,13 +935,6 @@ srcDir='src' # be downloaded downloadDir='downloads' -# The name of the dir containing the Python interpreter -pythonDir='python37' -pythonCmd='python3.7' - -# Whether or not to install all python packages in one step (-r requirements.txt) or step by step -oneStepPIP="true" - # The name of the dir holding the backend analysis services analysisLayerDir='AnalysisLayer' @@ -592,14 +973,14 @@ elif [ $verbosity == "loud" ]; then tarFlags='xvf' fi -WriteLine ' Setting up CodeProject.SenseAI Development Environment ' 'DarkYellow' -WriteLine ' ' 'DarkGreen' -WriteLine '========================================================================' 'DarkGreen' -WriteLine ' ' 'DarkGreen' -WriteLine ' CodeProject SenseAI Installer ' 'DarkGreen' -WriteLine ' ' 'DarkGreen' -WriteLine '========================================================================' 'DarkGreen' -WriteLine ' ' 'DarkGreen' +writeLine ' Setting up CodeProject.SenseAI Development Environment ' 'DarkYellow' +writeLine ' ' 'DarkGreen' +writeLine '========================================================================' 'DarkGreen' +writeLine ' ' 'DarkGreen' +writeLine ' CodeProject SenseAI Installer ' 'DarkGreen' +writeLine ' ' 'DarkGreen' +writeLine '========================================================================' 'DarkGreen' +writeLine ' ' 'DarkGreen' # ============================================================================ # 
House keeping @@ -608,376 +989,205 @@ checkForTool wget checkForTool unzip if [ "$platform" == "linux" ] && [ "$EUID" -ne 0 ]; then - WriteLine "Please run this script as root: sudo bash setup_dev_env_linux.sh" $color_error + writeLine "Please run this script as root: sudo bash setup_dev_env_linux.sh" $color_error exit fi -# M1 macs are trouble for python -if [ "$platform" == "osx" ] && [[ $(uname -p) == 'arm' ]]; then - Write "ARM (Apple silicon) Mac detected, not running under Rosetta. " $color_warn - if [ $(/usr/bin/pgrep oahd >/dev/null 2>&1; echo $?) -gt 0 ]; then - #if [ "$(pkgutil --files com.apple.pkg.RosettaUpdateAuto)" == "" ]; then - WriteLine 'Rosetta is not installed' $color_error - needRosettaAndiBrew - else - WriteLine 'Rosetta installed. We''re good to go' $color_success - fi -fi - # ============================================================================ # 1. Ensure directories are created and download required assets +writeLine +writeLine 'General SenseAI setup ' "White" "Blue" + # Create some directories -Write "Creating Directories..." $color_primary # For downloading assets -mkdir -p "${downloadPath}" +write "Creating Directories..." $color_primary +if [ $verbosity == "loud" ]; then writeLine "downloadPath is ${downloadPath}"; fi; -# For Text Summary -textSummaryPath="${analysisLayerPath}/${textSummaryDir}" - -# For DeepStack -deepStackPath="${analysisLayerPath}/${deepstackDir}" -mkdir -p "${deepStackPath}/${tempstoreDir}" - -# To do this properly we're going to use the standard directories for common application data -# commonDataDir="${deepStackPath}/${datastoreDir}" -commonDataDir='/usr/share/CodeProject/SenseAI' -if [ "$platform" == "osx" ]; then commonDataDir='~/Library/Application Support/CodeProject/SenseAI'; fi - -mkdir -p "$commonDataDir" -if [ $? -ne 0 ]; then - displayMacOSPermissionError "$commonDataDir" -fi -chmod 777 "$commonDataDir" - -# For Yolo.NET -yoloNetPath=${analysisLayerPath}/${yoloNetDir} - -WriteLine "Done" $color_success - -pythonInstallPath="${analysisLayerPath}/bin/${platform}/${pythonDir}" - -# Clean up directories to force a re-download if necessary -if [ "${forceOverwrite}" == "true" ]; then - - # Force re-download - rm -rf "${downloadPath}/${modelsDir}" - rm -rf "${downloadPath}/${yoloModelsDir}" - - # force overwrite - rm -rf "${deepStackPath}/${modelsDir}" - rm -rf "${yoloNetPath}/${modelsDir}" +mkdir -p "${downloadPath}" +if [ "$platform" == "macos" ]; then + write "We'll need to run under root to set permissions. " $color_warn + sudo chmod 777 "${downloadPath}" +else + write "Creating Directories..." $color_primary fi +writeLine "Done" $color_success -# Install Python 3.7. Using deadsnakes for Linux (not macOS), so be aware if you have concerns -# about potential late adoption of security patches. - -if [ ! 
-d "${pythonInstallPath}" ]; then - # mkdir -p "${analysisLayerPath}/bin/${platform}" - # mkdir -p "${analysisLayerPath}/bin/${platform}/${pythonDir}" - mkdir -p "$pythonInstallPath" -fi +# TextSummary specific ::::::::::::::::::::::::::::::::::::::::::::::::::::::: -if command -v $pythonCmd &> /dev/null; then - WriteLine "Python 3.7 is already installed" $color_success -else +writeLine +writeLine 'TextSummary setup ' "White" "Blue" - # For macOS we'll use brew to install python - if [ "$platform" == "osx" ]; then +# The name of the dir containing the TextSummary module +moduleDir='TextSummary' - if [[ $(uname -p) == 'arm' ]]; then +# Full path to the TextSummary dir +modulePath="${analysisLayerPath}/${moduleDir}" - # Apple silicon requires Rosetta2 for python3.7 to run, so use the x86 version of Brew - # we installed earlier - if [ "${verbosity}" == "quiet" ]; then - arch -x86_64 /usr/local/bin/brew install python@3.7 >/dev/null 2>/dev/null & - spin $! - else - arch -x86_64 /usr/local/bin/brew install python@3.7 - fi +setupPython 3.8 +installPythonPackages 3.8 "${modulePath}/requirements.txt" "nltk" - # Note that we only need the specific location of the python interpreter to setup the - # virtual environment. After it's setup, all python calls are relative to the same venv - # no matter the location of the original python interpreter - pythonCmd='/usr/local/opt/python@3.7/bin/python3.7' - else +# Background Remover ::::::::::::::::::::::::::::::::::::::::::::::::::::::::: - # We have a x64 version of python for macOS in our S3 bucket but it's easier simply to - # install python natively +writeLine +writeLine 'Background Remover setup ' "White" "Blue" - # Download $storageUrl $downloadPath "python3.7.12-osx64.tar.gz" "${platform}/${pythonDir}" "Downloading Python interpreter..." - # cp -R "${downloadPath}/${platform}/${pythonDir}" "${analysisLayerPath}/bin/${platform}" +# The name of the dir containing the background remover module +moduleDir='BackgroundRemover' - if [ "${verbosity}" == "quiet" ]; then - brew install python@3.7 >/dev/null 2>/dev/null & - spin $! - else - brew install python@3.7 - fi +# The name of the dir containing the background remover models +modulePath="${analysisLayerPath}/${moduleDir}" - fi +# The name of the dir containing the background remover models +moduleAssetsDir='models' - # For Linux we'll use apt-get the deadsnakes PPA to get the old version of python. Deadsnakes? - # Old python? Get it? Get it?! And who said developers have no sense of humour. - else +# The name of the file in our S3 bucket containing the assets required for this module +modelsAssetFilename='rembg-models.zip' - if [ "${verbosity}" == "quiet" ]; then - Write "Installing Python 3.7..." $color_primary - apt-get update -y >/dev/null 2>/dev/null & - spin $! - apt install software-properties-common -y >/dev/null 2>/dev/null & - spin $! - add-apt-repository ppa:deadsnakes/ppa -y >/dev/null 2>/dev/null & - spin $! - apt update -y >/dev/null 2>/dev/null & - spin $! - apt-get install python3.7 -y >/dev/null 2>/dev/null & - spin $! 
- WriteLine "Done" $color_success - else - WriteLine "Installing Python 3.7" $color_primary - apt-get update -y - apt install software-properties-common -y - add-apt-repository ppa:deadsnakes/ppa -y - apt update -y - apt-get install python3.7 -y - WriteLine "Done" $color_success - fi +setupPython 3.9 +installPythonPackages 3.9 "${modulePath}/requirements.txt" "onnxruntime" - # This is just in case: Correct https://askubuntu.com/a/1090081 - { - cp /usr/lib/python3/dist-packages/apt_pkg.cpython-35m-x86_64-linux-gnu.so /usr/lib/python3.7/apt_pkg.cpython-37m-x86_64-linux-gnu.so >/dev/null 2>/dev/null - ln -s /usr/lib/python3.5/lib-dynload/_gdbm.cpython-35m-x86_64-linux-gnu.so /usr/lib/python3.7/lib-dynload/_gdbm.cpython-37m-x86_64-linux-gnu.so >/dev/null 2>/dev/null - } >/dev/null 2>/dev/null - fi +# Clean up directories to force a re-copy if necessary +if [ "${forceOverwrite}" == "true" ]; then + rm -rf "${downloadPath}/${moduleDir}" + rm -rf "${modulePath}/${moduleAssetsDir}" fi -# We need to be sure on linux that pip/venv is available for python3.7 specifically. Brew on macOS -# adds pip3 within the brew formula, so just worry about linux -if [ "$platform" == "linux" ]; then - Write "Installing PIP and venv to enable final Python environment setup..." $color_primary - - if [ "${verbosity}" == "quiet" ]; then - apt-get install python3-pip -y >/dev/null 2>/dev/null - apt-get install python3.7-venv -y >/dev/null 2>/dev/null - else - apt-get install python3-pip -y - apt-get install python3.7-venv -y +if [ ! -d "${modulePath}/${moduleAssetsDir}" ]; then + Download $storageUrl "${downloadPath}" $modelsAssetFilename "${moduleDir}" "Downloading models..." + if [ -d "${downloadPath}/${moduleDir}" ]; then + mv -f "${downloadPath}/${moduleDir}" "${modulePath}/${moduleAssetsDir}" fi - - WriteLine "Done" $color_success fi -Write "Downloading modules and models: " $color_primary -WriteLine "Starting" $color_mute - -# Download the models -if [ ! -d "${deepStackPath}/${modelsDir}" ]; then - Download $storageUrl $downloadPath "models.zip" "${modelsDir}" "Downloading models..." - if [ -d "${downloadPath}/${modelsDir}" ]; then - mv -f "${downloadPath}/${modelsDir}" "${deepStackPath}/${modelsDir}" - fi -fi -if [ ! -d "${yoloNetPath}/${modelsDir}" ]; then - Download $storageUrl $downloadPath "yolonet-models.zip" "${yoloModelsDir}" "Downloading Yolo.Net models..." - if [ -d "${downloadPath}/${yoloModelsDir}" ]; then - mv -f "${downloadPath}/${yoloModelsDir}" "${yoloNetPath}/${modelsDir}" - fi -fi +# DeepStack specific ::::::::::::::::::::::::::::::::::::::::::::::::::::::::: -WriteLine "Modules and models downloaded" $color_success +writeLine +writeLine 'Vision toolkit setup ' "White" "Blue" -# ============================================================================ -# 2. Create & Activate Virtual Environment: DeepStack specific / Python 3.7 +# The name of the dir containing the background remover module +moduleDir='DeepStack' -Write "Creating Virtual Environment..." 
$color_primary

-if [ -d "${pythonInstallPath}/venv" ]; then
-    WriteLine "Already present" $color_success
-else
+# The full path to the DeepStack vision module
+modulePath="${analysisLayerPath}/${moduleDir}"

-    #if [ "$platform" == "osx" ]; then
-    #    "$pythonInstallPath/bin/python3.7" -m venv "${pythonInstallPath}/venv" &
-    #else
-        ${pythonCmd} -m venv "${pythonInstallPath}/venv" &
-    #fi
+# The name of the dir containing the DeepStack models
+moduleAssetsDir='assets'

-    spin $! # process ID of the unzip/tar call
-    WriteLine "Done" $color_success
+# The name of the file in our S3 bucket containing the assets required for this module
+modelsAssetFilename='models.zip'
 fi

-Write "Enabling our Virtual Environment..." $color_primary
-pushd "${pythonInstallPath}" >/dev/null
-
-# PYTHONHOME="$(pwd)/venv"
-# export PYTHONHOME
-
-VIRTUAL_ENV="$(pwd)/venv"
-export VIRTUAL_ENV
-
-PATH="$VIRTUAL_ENV/bin:$PATH"
-export PATH
-
-pythonInterpreterPath="${VIRTUAL_ENV}/bin/python3"
-
-PS1="(venv) ${PS1:-}"
-
-popd >/dev/null
-WriteLine "Done" $color_success
-
-# Ensure Python Exists
-Write "Checking for Python 3.7..." $color_primary
-pyVersion=$($pythonInterpreterPath --version)
-Write "Found ${pyVersion}. " $color_mute
-
-echo $pyVersion | grep "3.7" >/dev/null
-if [ $? -ne 0 ]; then
-    errorNoPython
-fi
-WriteLine "present" $color_success
-
-# ============================================================================
-# 3a. Install PIP packages
-
-Write "Installing Python package manager..." $color_primary
-pushd "$VIRTUAL_ENV/bin" > /dev/null
-./python3 -m pip install --upgrade pip $pipFlags &
-spin $!
-popd > /dev/null
-WriteLine "Done" $color_success
+setupPython 3.8
+if [ "$platform" == "macos" ]; then
+    installPythonPackages 3.8 "${modulePath}/intelligencelayer/requirements.macos.txt" "torch"
+else
+    installPythonPackages 3.8 "${modulePath}/intelligencelayer/requirements.txt" "torch"
 fi

-Write "Checking for required packages..." $color_primary
-# ASSUMPTION: If venv/lib/python3.7/site-packages/torch exists then no need to do this
-if [ ! -d "${VIRTUAL_ENV}/lib/python3.7/site-packages/torch" ]; then
+# Clean up directories to force a re-copy if necessary
+if [ "${forceOverwrite}" == "true" ]; then
+    rm -rf "${downloadPath}/${moduleDir}"
+    rm -rf "${modulePath}/${moduleAssetsDir}"
+fi

-    WriteLine "Packages missing. Installing..." $color_info
-    requirementsFile="${deepStackPath}/${intelligenceDir}/requirements.txt"
+if [ ! -d "${modulePath}/${moduleAssetsDir}" ]; then
+    Download $storageUrl "${downloadPath}" $modelsAssetFilename "${moduleDir}" "Downloading models..."
+    if [ -d "${downloadPath}/${moduleDir}" ]; then
+        mv -f "${downloadPath}/${moduleDir}" "${modulePath}/${moduleAssetsDir}"
+    fi
+fi

-    if [ "${oneStepPIP}" == "true" ]; then
+# DeepStack needs these to store temp and persisted data
+mkdir -p "${deepStackPath}/${tempstoreDir}"

+# To do this properly we're going to use the standard directories for common application data
+# mkdir -p "${deepStackPath}/${datastoreDir}"
+commonDataDir='/usr/share/CodeProject/SenseAI'
+if [ "$platform" == "macos" ]; then
+    commonDataDir="/Library/Application Support/CodeProject/SenseAI"
+fi

-        # Install the Python Packages in one fell swoop. Not much feedback, but it works
-        Write "Installing Packages into Virtual Environment..." $color_primary
-        #pip install -r "${requirementsFile}" $pipFlags
-        pushd "$VIRTUAL_ENV/bin" > /dev/null
-        ./python3 -m pip install -r ${requirementsFile} > /dev/null
-        popd > /dev/null
-        WriteLine "Success" $color_success
+if [ ! -d "${commonDataDir}" ]; then
+    if [ "$platform" == "macos" ]; then
+        if [[ $EUID > 0 ]]; then
+            writeLine "Creating data directory at ${commonDataDir}. We'll need admin access..." $color_info
+        fi
+        sudo mkdir -p "${commonDataDir}"
+        if [ $? -ne 0 ]; then
+            displayMacOSPermissionError "${commonDataDir}"
+        fi
+        sudo chmod 777 "${commonDataDir}"
+    else
+        mkdir -p "${commonDataDir}"
+    fi
+fi
 else

-        # Open requirements.txt and grab each line. We need to be careful with --find-links lines
-        # as this doesn't currently work in Linux
-        currentOption=""
-
-        IFS=$'\n' # set the Internal Field Separator as end of line
-        cat "${requirementsFile}" | while read -r line
-        do
-
-            line="$(echo $line | tr -d '\r\n')"    # trim newlines / CRs
-
-            if [ "${line}" == "" ]; then
-                currentOption=""
-            elif [ "${line:0:2}" == "##" ]; then
-                currentOption=""
-            elif [ "${line:0:2}" == "#!" ]; then
-                currentOption=""
-            elif [ "${line:0:12}" == "--find-links" ]; then
-                currentOption="${line}"
-            else
-
-                module="${line}"
-                description=""
-
-                # breakup line into module name and description
-                IFS='#'; tokens=($module); IFS=$'\n';
-
-                if [ ${#tokens[*]} -gt 1 ]; then
-                    module="${tokens[0]}"
-                    description="${tokens[1]}"
-                fi
-
-                if [ "${description}" == "" ]; then
-                    description="Installing ${module}"
-                fi
-
-                if [ "${module}" != "" ]; then
-
-                    # Some packages have a version number after a "==". We need to trim that here.
-                    IFS='='; tokens=($module); IFS=$'\n';
-                    if [ ${#tokens[*]} -gt 1 ]; then
-                        module="${tokens[0]}"
-                    fi
-                    currentOption=""    # Given that we're stripping versions, ignore this too
-
-                    Write "  -${description}..." $color_primary
+# For Yolo.NET :::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::

-                    pushd "$VIRTUAL_ENV/bin" > /dev/null
-                    if [ "${verbosity}" == "quiet" ]; then
-                        ./python3 -m pip install $module $currentOption $pipFlags >/dev/null 2>/dev/null &
-                        spin $!
-                    else
-                        # echo python3 -m pip install $module $currentOption $pipFlags
-                        ./python3 -m pip install $module $currentOption $pipFlags
-                    fi
-                    popd > /dev/null
+writeLine
+writeLine 'Object Detector setup ' "White" "Blue"

-                    WriteLine "Done" $color_success
+# The name of the dir containing the Yolo.NET object detector module
-                fi
+moduleDir='CodeProject.SenseAI.AnalysisLayer.Yolo'

-                currentOption=""
+# The full path to the Yolo.NET object detector module
-            fi
+modulePath="${analysisLayerPath}/${moduleDir}"

-        done
-        unset IFS
-    fi
+# The name of the dir containing the Yolo.NET models
+moduleAssetsDir='assets'

-else
-    WriteLine "present." $color_success
+# The name of the file in our S3 bucket containing the assets required for this module
+modelsAssetFilename='yolonet-models.zip'
 fi

+# Clean up directories to force a re-copy if necessary
+if [ "${forceOverwrite}" == "true" ]; then
+    rm -rf "${downloadPath}/${moduleDir}"
+    rm -rf "${modulePath}/${moduleAssetsDir}"
+fi

-# ============================================================================
-# 3b. Install PIP packages for TextSummary
-
-Write "Installing required Text Processing packages..." $color_primary
-
-# making an assumption here that the presence of site-packages/nltk means TextSummary packages installed
-if [ !
-d "${VIRTUAL_ENV}/lib/python3.7/site-packages/nltk" ]; then - pushd "$VIRTUAL_ENV/bin" > /dev/null - ./python3 -m pip install -r "${textSummaryPath}/requirements.txt" $pipFlags >/dev/null 2>/dev/null & - spin $! - popd > /dev/null - WriteLine "Done" $color_success -else - WriteLine "Already present" $color_success +if [ ! -d "${modulePath}/${moduleAssetsDir}" ]; then + Download $storageUrl "${downloadPath}" $modelsAssetFilename "${moduleDir}" "Downloading models..." + if [ -d "${downloadPath}/${moduleDir}" ]; then + mv -f "${downloadPath}/${moduleDir}" "${modulePath}/${moduleAssetsDir}" + fi fi -# ============================================================================ -# 3c. Install libraries for .NET Yolo inferrer - # libfontconfig1 is required for SkiaSharp, libgdplus is required for System.Drawing +write "Installing supporting image libraries..." if [ "$platform" == "linux" ]; then if [ "${verbosity}" == "quiet" ]; then - apt-get install libfontconfig1 -y >/dev/null 2>/dev/null - apt-get install libgdiplus -y >/dev/null 2>/dev/null + apt-get install libfontconfig1 -y >/dev/null 2>/dev/null & + spin $! + apt-get install libgdiplus -y >/dev/null 2>/dev/null & + spin $! else apt-get install libfontconfig1 -y apt-get install libgdiplus -y fi else - brew install fontconfig - brew install mono-libgdiplus + if [ "${verbosity}" == "quiet" ]; then + brew install fontconfig >/dev/null 2>/dev/null & + spin $! + brew install mono-libgdiplus >/dev/null 2>/dev/null & + spin $! + else + brew install fontconfig + brew install mono-libgdiplus + fi fi +writeLine "Done" $color_success + # ============================================================================ # ...and we're done. -WriteLine -WriteLine ' Development Environment setup complete ' 'White' 'DarkGreen' -WriteLine +writeLine +writeLine ' Development Environment setup complete ' 'White' 'DarkGreen' +writeLine quit \ No newline at end of file diff --git a/Installers/Dev/setup_dev_env_win.bat b/Installers/Dev/setup_dev_env_win.bat index 73376ea8..aefd1cb8 100644 --- a/Installers/Dev/setup_dev_env_win.bat +++ b/Installers/Dev/setup_dev_env_win.bat @@ -10,7 +10,7 @@ cls setlocal enabledelayedexpansion :: verbosity can be: quiet | info | loud -set verbosity=quiet +set verbosity=info :: If files are already present, then don't overwrite if this is false set forceOverwrite=false @@ -21,40 +21,17 @@ set useColor=true :: Platform can define where things are located set platform=windows + :: Basic locations :: The location of the solution root directory relative to this script set rootPath=../.. 
-:: SenseAI specific ::::::::::::::::::::::::::::::::::::::::::::::::::::::::::: +:: SenseAI Server specific :::::::::::::::::::::::::::::::::::::::::::::::::::: :: The name of the dir holding the frontend API server set senseAPIDir=API -:: TextSummary specific ::::::::::::::::::::::::::::::::::::::::::::::::::::::: - -set textSummaryDir=TextSummary - -:: DeepStack specific ::::::::::::::::::::::::::::::::::::::::::::::::::::::::: - -:: The name of the dir holding the DeepStack analysis services -set deepstackDir=DeepStack - -:: The name of the dir containing the Python code itself -set intelligenceDir=intelligencelayer - -:: The name of the dir containing the AI models themselves -set modelsDir=assets - -:: The name of the dir containing persisted DeepStack data -set datastoreDir=datastore - -:: The name of the dir containing temporary DeepStack data -set tempstoreDir=tempstore - -:: Yolo.Net specific -set yoloNetDir=CodeProject.SenseAI.AnalysisLayer.Yolo -set yoloModelsDir=yoloModels :: Shared ::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::: @@ -71,9 +48,6 @@ set srcDir=src :: be downloaded set downloadDir=downloads -:: The name of the dir containing the Python interpreter -set pythonDir=python37 - :: The name of the dir holding the backend analysis services set analysisLayerDir=AnalysisLayer @@ -121,184 +95,149 @@ call :WriteLine " :: ============================================================================ :: 1. Ensure directories are created and download required assets +call :WriteLine +call :WriteLine "General SenseAI setup" "DarkGreen" + :: Create some directories call :Write "Creating Directories..." :: For downloading assets if not exist "%downloadPath%\" mkdir "%downloadPath%" +call :WriteLine "Done" "Green" -:: For Text Summary -set textSummaryPath=%analysisLayerPath%\%textSummaryDir% -:: For DeepStack -set deepStackPath=%analysisLayerPath%\%deepstackDir% -if not exist "%deepStackPath%\%tempstoreDir%\" mkdir "%deepStackPath%\%tempstoreDir%" -if not exist "%deepStackPath%\%datastoreDir%\" mkdir "%deepStackPath%\%datastoreDir%" +:: TextSummary specific ::::::::::::::::::::::::::::::::::::::::::::::::::::::: -:: For Yolo.NET -set yoloNetPath=%analysisLayerPath%\%yoloNetDir% +call :WriteLine +call :WriteLine "TextSummary setup" "DarkGreen" -call :WriteLine "Done" Green +:: The name of the dir containing the TextSummary module +set moduleDir=TextSummary -call :Write "Downloading utilities and models: " -call :WriteLine "Starting" Gray +:: Full path to the TextSummary dir +set modulePath=%analysisLayerPath%\%moduleDir% -set pythonInstallPath=%analysisLayerPath%\bin\%platform%\%pythonDir% +call :SetupPython 3.7 +call :InstallPythonPackages 3.7 "%modulePath%\requirements.txt" "nltk" -:: Clean up directories to force a re-download if necessary -if /i "%forceOverwrite%" == "true" ( - REM Force Re-download - if exist "%downloadPath%\%platform%\%pythonDir%" rmdir /s "%rmdirFlags% %downloadPath%\%platform%\%pythonDir%" - if exist "%downloadPath%\%modelsDir%" rmdir /s "%rmdirFlags% %downloadPath%\%modelsDir%" - if exist "%downloadPath%\%yoloModelsDir%" rmdir /s "%rmdirFlags% %downloadPath%\%yoloModelsDir%" +:: Background Remover ::::::::::::::::::::::::::::::::::::::::::::::::::::::::: - REM Force overwrite - if exist "%pythonInstallPath%" rmdir /s "%rmdirFlags% %pythonInstallPath%" - if exist "%deepStackPath%\%modelsDir%" rmdir /s "%rmdirFlags% %deepStackPath%\%modelsDir%" - if exist "%yoloNetPath%\%modelsDir%" rmdir /s "%rmdirFlags% %yoloNetPath%\%modelsDir%" -) 
+call :WriteLine +call :WriteLine "Background Remover setup" "DarkGreen" -:: Download whatever packages are missing -if not exist "%pythonInstallPath" ( - if not exist "%downloadPath%\%platform%\" mkdir "%downloadPath%\%platform%" - if not exist "%pythonInstallPath%" ( - call :Download "%storageUrl%" "%downloadPath%\%platform%\" "python37.zip" "%pythonDir%" ^ - "Downloading Python interpreter..." - if exist "%downloadPath%\%platform%\%pythonDir%" ( - robocopy /e "%downloadPath%\%platform%\%pythonDir% " "%pythonInstallPath% " !roboCopyFlags! > NUL - ) - ) -) -if not exist "%deepStackPath%\%modelsDir%" ( - call :Download "%storageUrl%" "%downloadPath%\" "models.zip" "%modelsDir%" ^ - "Downloading models..." - if exist "%downloadPath%\%modelsDir%" ( - robocopy /e "%downloadPath%\%modelsDir% " "%deepStackPath%\%modelsDir% " !roboCopyFlags! > NUL - ) -) -if not exist "%yoloNetPath%\%modelsDir%" ( - call :Download "%storageUrl%" "%downloadPath%\" "yolonet-models.zip" "%yoloModelsDir%" ^ - "Downloading Yolo.Net models..." - if exist %downloadPath%\%yoloModelsDir% ( - robocopy /e "%downloadPath%\%yoloModelsDir% " "%yoloNetPath%\%modelsDir% " !roboCopyFlags! > NUL - ) -) +:: The name of the dir containing the Background Remover module +set moduleDir=BackgroundRemover -call :WriteLine "Modules and models downloaded" "Green" +:: The full path of the background remover module +set modulePath=%analysisLayerPath%\%moduleDir% -:: Copy over the startup script -:: call :Write "Copying over startup script..." -:: copy /Y "Start_SenseAI_Win.bat" "!absoluteRootDir!" >nul 2>nul -:: :WriteLine "Done." "Green" +:: The name of the dir containing the background remover models +set moduleAssetsDir=models +:: The name of the file in our S3 bucket containing the assets required for this module +set modelsAssetFilename=rembg-models.zip -:: ============================================================================ -:: 2. Create & Activate Virtual Environment: DeepStack specific / Python 3.7 +:: Install python and the required dependencies +call :SetupPython 3.9 +call :InstallPythonPackages 3.9 "%modulePath%\requirements.txt" "onnxruntime" -call :Write "Creating Virtual Environment..." -if exist "%pythonInstallPath%\venv" ( - call :WriteLine "Already present" "Green" -) else ( - "%pythonInstallPath%\python.exe" -m venv "%pythonInstallPath%\venv" - call :WriteLine "Done" "Green" +:: Clean up directories to force a download and re-copy if necessary +if /i "%forceOverwrite%" == "true" ( + if exist "%downloadPath%\%moduleDir%" rmdir /s %rmdirFlags% "%downloadPath%\%moduleDir%" + if exist "%modulePath%\%moduleAssetsDir%" rmdir /s %rmdirFlags% "%modulePath%\%moduleAssetsDir%" ) -call :Write "Enabling our Virtual Environment..." -pushd "%pythonInstallPath%" - -:: set PYTHONHOME="%cd%\venv\Scripts" -set VIRTUAL_ENV=%cd%\venv -set PYTHONHOME= -set PATH=!VIRTUAL_ENV!\Scripts;%PATH% +:: Location of models as per original repo +:: u2netp: https://drive.google.com/uc?id=1tNuFmLv0TSNDjYIkjEdeH1IWKQdUA4HR +:: u2net: https://drive.google.com/uc?id=1tCU5MM1LhRgGou5OpmpjBQbSrYIUoYab +:: u2net_human_seg: https://drive.google.com/uc?id=1ZfqwVxu-1XWC1xU1GHIP-FM_Knd_AX5j +:: u2net_cloth_seg: https://drive.google.com/uc?id=15rKbQSXQzrKCQurUjZFg8HqzZad8bcyz + +if not exist "%modulePath%\%moduleAssetsDir%" ( + call :Download "%storageUrl%" "%downloadPath%\" "%modelsAssetFilename%" "%moduleDir%" ^ + "Downloading Background Remover models..." 
+ if exist "%downloadPath%\%modulesDir%" ( + robocopy /e "%downloadPath%\%moduleDir% " "%modulePath%\%moduleAssetsDir% " !roboCopyFlags! > NUL + ) +) -set pythonInterpreterPath="!VIRTUAL_ENV!\python3" -if not defined PROMPT set PROMPT=$P$G -set PROMPT=(venv) !PROMPT! +:: For DeepStack Vision AI ::::::::::::::::::::::::::::::::::::::::::::::::: -popd -call :WriteLine "Done" "Green" +call :WriteLine +call :WriteLine "Vision toolkit setup" "DarkGreen" -:: Ensure Python Exists -call :Write "Checking for Python 3.7..." -python --version | find "3.7" > NUL -if errorlevel 1 goto errorNoPython -call :WriteLine "present" "Green" +:: The name of the dir containing the Deepstack Vision modules +set moduleDir=DeepStack -if "%verbosity%"=="loud" where Python +:: The full path of the Deepstack Vision modules +set modulePath=%analysisLayerPath%\%moduleDir% +:: The name of the dir containing the AI models themselves +set moduleAssetsDir=assets -:: ============================================================================ -:: 3a. Install PIP packages for Python analysis services +:: The name of the file in our S3 bucket containing the assets required for this module +set modelsAssetFilename=models.zip -call :Write "Installing Python package manager..." -python -m pip install --trusted-host pypi.python.org ^ - --trusted-host files.pythonhosted.org ^ - --trusted-host pypi.org --upgrade pip !pipFlags! -call :WriteLine "Done" "Green" +:: Install python and the required dependencies +call :SetupPython 3.7 +call :InstallPythonPackages 3.7 "%modulePath%\intelligencelayer\requirements.txt" "torch" -call :Write "Checking for required packages..." +:: Clean up directories to force a download and re-copy if necessary +if /i "%forceOverwrite%" == "true" ( + REM Force Re-download, then force re-copy of downloads to install dir + if exist "%downloadPath%\%moduleDir%" rmdir /s %rmdirFlags% "%downloadPath%\%moduleDir%" + if exist "%modulePath%\%moduleAssetsDir%" rmdir /s %rmdirFlags% "%modulePath%\%moduleAssetsDir%" +) -:: ASSUMPTION: If venv\Lib\site-packages\torch exists then no need to do this -if not exist "!VIRTUAL_ENV!\Lib\site-packages\torch" ( +if not exist "%modulePath%\%moduleAssetsDir%" ( + call :Download "%storageUrl%" "%downloadPath%\" "%modelsAssetFilename%" "%moduleDir%" ^ + "Downloading Vision models..." + if exist "%downloadPath%\%moduleDir%" ( + robocopy /e "%downloadPath%\%moduleDir% " "%modulePath%\%moduleAssetsDir% " !roboCopyFlags! > NUL + ) +) - call :WriteLine "Installing" "Yellow" +:: Deepstack needs these to store temp and pesrsisted data +if not exist "%modulePath%\tempstore\" mkdir "%modulePath%\tempstore" +if not exist "%modulePath%\datastore\" mkdir "%modulePath%\datastore" - REM call :Write "Installing Packages into Virtual Environment..." - REM pip install -r %deepStackPath%\%intelligenceDir%\requirements.txt !pipFlags! - REM call :WriteLine "Success" "Green" - REM We'll do this the long way so we can see some progress +:: For CodeProject's YOLO ObjectDetector ::::::::::::::::::::::::::::::::::::::::::::: - set currentOption= - for /f "tokens=*" %%x in (' more ^< "%deepStackPath%\%intelligenceDir%\requirements.txt" ') do ( - set line=%%x +call :WriteLine +call :WriteLine "Object Detector setup" "DarkGreen" - if "!line!" == "" ( - set currentOption= - ) else if "!line:~0,2!" == "##" ( - set currentOption= - ) else if "!line:~0,2!" == "#!" ( - set currentOption= - ) else if "!line:~0,12!" == "--find-links" ( - set currentOption=!line! 
-        ) else (
-
-            REM breakup line into module name and description
-            set module=!line!
-            for /F "tokens=1,2 delims=#" %%a in ("!line!") do (
-                set module=%%a
-                set description=%%b
-            )
+:: The name of the dir containing the Object Detector module. Yes, some brevity here would be good
+set moduleDir=CodeProject.SenseAI.AnalysisLayer.Yolo
-            if "!description!" == "" set description=Installing !module!
+:: The full path of the Object Detector module
+set modulePath=%analysisLayerPath%\%moduleDir%
-            if "!module!" NEQ "" (
-                call :Write " -!description!..."
+:: The name of the dir containing the AI models themselves
+set moduleAssetsDir=assets
-                if /i "%verbosity%" == "quiet" (
-                    python.exe -m pip install !module! !currentOption! !pipFlags! >nul 2>nul
-                ) else (
-                    python.exe -m pip install !module! !currentOption! !pipFlags!
-                )
+:: The name of the file in our S3 bucket containing the assets required for this module
+set modelsAssetFilename=yolonet-models.zip
-                call :WriteLine "Done" "Green"
-            )
+:: Clean up directories to force a download and re-copy if necessary
+if /i "%forceOverwrite%" == "true" (
+    REM Force Re-download, then force re-copy of downloads to install dir
+    if exist "%downloadPath%\%moduleDir%" rmdir /s %rmdirFlags% "%downloadPath%\%moduleDir%"
+    if exist "%modulePath%\%moduleAssetsDir%" rmdir /s %rmdirFlags% "%modulePath%\%moduleAssetsDir%"
+)
-            set currentOption=
-        )
+if not exist "%modulePath%\%moduleAssetsDir%" (
+    call :Download "%storageUrl%" "%downloadPath%\" "%modelsAssetFilename%" "%moduleDir%" ^
+        "Downloading Object Detector models..."
+    if exist "%downloadPath%\%moduleDir%" (
+        robocopy /e "%downloadPath%\%moduleDir% " "%modulePath%\%moduleAssetsDir% " !roboCopyFlags! > NUL
 )
-) else (
-    call :WriteLine "present." "Green"
)
-:: ============================================================================
-:: 3b. Install PIP packages for TextSummary
-
-call :Write "Installing required Text Processing packages..."
-pip install -r "%textSummaryPath%\requirements.txt" !pipFlags!
-call :WriteLine "Success" "Green"
+call :WriteLine
+call :WriteLine "Modules and models downloaded" "Green"
 :: ============================================================================
@@ -513,6 +452,7 @@ goto:eof
 REM -OutFile !downloadPath!!dirToSave!.zip
 REM Be careful with the quotes so we can handle paths with spaces
+    REM call :Write "Start-BitsTransfer -Source '!storageUrl!!fileToGet!' -Destination '!downloadToDir!!dirToSave!.zip' ..." "White"
 powershell -command "Start-BitsTransfer -Source '!storageUrl!!fileToGet!' -Destination '!downloadToDir!!dirToSave!.zip'"
 if errorlevel 1 (
@@ -551,6 +491,160 @@ goto:eof
 exit /b
+:SetupPython
+    SetLocal EnableDelayedExpansion
+
+    set pythonVersion=%1
+
+    REM Version with ".'s removed
+    set pythonName=python!pythonVersion:.=!
+
+    set installPath=!analysisLayerPath!\bin\!platform!\!pythonName!
+
+    if /i "%forceOverwrite%" == "true" (
+        REM Force Re-download
+        if exist "!downloadPath!\!platform!\!pythonName!" (
+            rmdir /s %rmdirFlags% "!downloadPath!\!platform!\!pythonName!"
+        )
+
+        REM Force overwrite
+        if exist "!installPath!" rmdir /s %rmdirFlags% "!installPath!"
+    )
+
+    REM Download whatever packages are missing
+    if exist "!installPath!" (
+        call :WriteLine "!pythonName! package already downloaded" "DarkGray"
+    ) else (
+        set baseDir=!downloadPath!\!platform!\
+        if not exist "!baseDir!" mkdir "!baseDir!"
+        if not exist "!installPath!" (
+            call :Download "%storageUrl%" "!baseDir!" "!pythonName!.zip" "!pythonName!" "Downloading Python !pythonVersion!
interpreter..." + if exist "!downloadPath!\!platform!\!pythonName!" ( + robocopy /e "!downloadPath!\!platform!\!pythonName! " "!installPath! " !roboCopyFlags! > NUL + ) + ) + ) + + call :Write "Creating Virtual Environment..." + if exist "!installPath!\venv" ( + call :WriteLine "Python !pythonVersion! Already present" "Green" + ) else ( + "!installPath!\python.exe" -m venv "!installPath!\venv" + call :WriteLine "Done" "Green" + ) + + call :Write "Enabling our Virtual Environment..." + pushd "!installPath!" + + set venvPath=%cd%\venv + set pythonInterpreterPath="!venvPath!\Scripts\python" + + popd + + call :WriteLine "Done" "Green" + + rem Ensure Python Exists + call :Write "Confirming we have Python !pythonVersion!..." + !pythonInterpreterPath! --version | find "!pythonVersion!" > NUL + if errorlevel 1 goto errorNoPython + call :WriteLine "present" "Green" + + exit /b + +:InstallPythonPackages + + SetLocal EnableDelayedExpansion + + REM Whether or not to install all python packages in one step (-r requirements.txt) or step by + REM step. Doing this allows the PIP manager to handle incompatibilities better. + set oneStepPIP=true + + set pythonVersion=%1 + set pythonName=python!pythonVersion:.=! + + set requirementsPath=%~2 + set testForPipExistanceName=%~3 + + set virtualEnv=!analysisLayerPath!\bin\!platform!\!pythonName!\venv + + rem This will be the python interpreter in the virtual env + set pythonPath=!virtualEnv!\Scripts\python + + rem ============================================================================ + rem 3a. Install PIP packages for Python analysis services + + call :Write "Installing Python package manager..." + !pythonPath! -m pip install --trusted-host pypi.python.org ^ + --trusted-host files.pythonhosted.org ^ + --trusted-host pypi.org --upgrade pip !pipFlags! + call :WriteLine "Done" "Green" + + call :Write "Checking for required packages..." + + rem ASSUMPTION: If venv\Lib\site-packages\ exists then no need to check further + if not exist "!virtualEnv!\Lib\site-packages\!testForPipExistanceName!" ( + + call :WriteLine "Packages missing. Installing..." "Yellow" + + if "!oneStepPIP!" == "true" ( + + call :Write "Installing Packages into Virtual Environment..." + REM pip install -r !requirementsPath! !pipFlags! + !pythonPath! -m pip install -r !requirementsPath! !pipFlags! + call :WriteLine "Success" "Green" + + ) else ( + + REM We'll do this the long way so we can see some progress + + set currentOption= + for /f "tokens=*" %%x in (' more ^< "!requirementsPath!" ') do ( + set line=%%x + + if "!line!" == "" ( + set currentOption= + ) else if "!line:~0,2!" == "##" ( + set currentOption= + ) else if "!line:~0,8!" == "# Python" ( REM Note: It's actually #! Python in the file. + set currentOption= + ) else if "!line:~0,12!" == "--find-links" ( + set currentOption=!line! + ) else ( + + REM breakup line into module name and description + set module=!line! + for /F "tokens=1,2 delims=#" %%a in ("!line!") do ( + set module=%%a + set description=%%b + ) + + if "!description!" == "" set description=Installing !module! + + if "!module!" NEQ "" ( + call :Write " -!description!..." + + if /i "%verbosity%" == "quiet" ( + !pythonPath! -m pip install !module! !currentOption! !pipFlags! >nul 2>nul + ) else ( + !pythonPath! -m pip install !module! !currentOption! !pipFlags! + ) + + call :WriteLine "Done" "Green" + ) + + set currentOption= + ) + ) + + ) + + ) else ( + call :WriteLine "present." 
"Green" + ) + + exit /b + + :: Jump points :errorNoPython @@ -558,4 +652,5 @@ call :WriteLine call :WriteLine call :WriteLine "-------------------------------------------------------" call :WriteLine "Error: Python not installed" "Red" -goto:eof +goto:EOF +exit diff --git a/Installers/Old School/CreateWindowsService.cmd b/Installers/Old School/CreateWindowsService.cmd deleted file mode 100644 index 5f186ee7..00000000 --- a/Installers/Old School/CreateWindowsService.cmd +++ /dev/null @@ -1,6 +0,0 @@ -Rem CreateWindowsService.ps1 - -sc.exe create "CodeProject SenseAI Server" binPath= "%CD%\CodeProject.SenseAI.Server.exe --urls http://*:5000 --environment Production" start= auto -sc.exe description "CodeProject SenseAI Server" "A Service hosting the CodeProject SenseAI WebAPI for face detection and recognition, object detection, and scene classification." -sc.exe failure "CodeProject SenseAI Server" reset= 30 actions= restart/5000/restart/5000/restart/5000 -sc.exe start "CodeProject SenseAI Server" \ No newline at end of file diff --git a/Installers/Old School/DeleteWindowsService.cmd b/Installers/Old School/DeleteWindowsService.cmd deleted file mode 100644 index 3bbb4d51..00000000 --- a/Installers/Old School/DeleteWindowsService.cmd +++ /dev/null @@ -1,4 +0,0 @@ -Rem DeleteWindowsService.ps1 - -sc.exe stop "CodeProject SenseAI Server" -sc.exe delete "CodeProject SenseAI Server" diff --git a/Installers/Old School/Setup_SenseAI_Win.bat b/Installers/Old School/Setup_SenseAI_Win.bat deleted file mode 100644 index 91718f55..00000000 --- a/Installers/Old School/Setup_SenseAI_Win.bat +++ /dev/null @@ -1,401 +0,0 @@ -:: ============================================================================ -:: -:: CodeProject SenseAI Server Installation Setup script -:: -:: Run this to SETUP CodeProject SenseAI -:: -:: WELCOME TO CODEPROJECT SENSEAI! -:: -:: Please ensure this script is run before you start exploring and integrating -:: CodeProject.SenseAI. This script assumes the front end API server is already -:: in place, and will finish up the setup by performing any tasks required by -:: the various backend analysis services. -:: -:: Grab a coffee because it could take a while. But it'll be worth it. -:: -:: Current backend analysis services installed: -:: -:: - Face Detection & recognition -:: - Object Detection -:: - Scene Detection -:: -:: ============================================================================ - - -@echo off -cls -SETLOCAL EnableDelayedExpansion - -:: ---------------------------------------------------------------------------- -:: Script settings. - -:: The name of the source directory containing the Frontend API and backend analysis services -set srcDir=src - -:: Show output in wild, crazy colours -set techniColor=true - -:: verbosity can be: quiet | info | loud -set verbosity=quiet - -:: The name of the dir holding the backend analysis services (within the src dir) -set analysisLayerDir=AnalysisLayer - -if /i "%techniColor%" == "true" call :setESC - - -:: move down 8 lines so the download/unzip progress doesn't hide the output.' 
-call :WriteLine Yellow "Setting up CodeProject.SenseAI" -call :WriteLine White "" -call :WriteLine White "========================================================================" -call :WriteLine White "" -call :WriteLine White " CodeProject SenseAI Installer" -call :WriteLine White "" -call :WriteLine White "========================================================================" -call :WriteLine White "" - - - -:: ---------------------------------------------------------------------------- -:: MODULES (from DeepStack) -:: - Face Detection & recognition -:: - Object Detection -:: - Scene Detection - -:: The location of large packages that need to be downloaded -:: a. From AWS -set storageUrl=https://codeproject-ai.s3.ca-central-1.amazonaws.com/sense/installer/ -:: b. From contrary GCP -rem set storageUrl=https://storage.googleapis.com/codeproject-senseai/ -:: c. Use a local directory rather than from online. Handy for debugging. -rem set storageUrl=C:\Dev\CodeProject\CodeProject.SenseAI\install\cached_downloads\ - -:: The name of the dir holding the DeepStack modules (within the analysis services dir) -set deepstackDir=DeepStack - -:: The name of the dir containing the Python interpreter (within the deepstack dir) -set pythonDir=python37 - -:: The name of the dir containing the AI models themselves (within the deepstack dir) -set deepStackModelsDir=assets - -:: The name of the dir holding the DeepStack modules (within the analysis services dir) -set yoloNetDir=CodeProject.SenseAI.AnalysisLayer.Yolo - -:: The name of the dir containing the AI models themselves (within the yolo.net dir) -set yoloNetModelsDir=assets - -:: Set the noise level when installing Python packages -set pipFlags=-q -if /i "%verbosity%" == "info" set pipFlags= - -:: ---------------------------------------------------------------------------- -:: Download assets - -:: Setup backend analysis modules -set analysisPath=%srcDir%\%analysisLayerDir% - -:: Module 1: DeepStack -set deepStackPath=%cd%\%analysisPath%\%deepstackDir% - -:: Module 2: Yolo.net -set yoloNetPath=%cd%\%analysisPath%\%yoloNetDir% - -:: Warn the user about potential download size -set /a downloadSize=0 -if not exist "%deepStackPath%\%pythonDir%" set /a downloadSize=downloadSize+25 -if not exist "%deepStackPath%\%deepStackModelsDir%" set /a downloadSize=downloadSize+550 -if not exist "%yoloNetPath%\%yoloNetModelsDir%" set /a downloadSize=downloadSize+98 - -if !downloadSize! gtr 0 ( - choice /M "To continue I need to download !downloadSize!Mb of files. Is this OK" - if errorlevel 2 goto:eof -) - -:: Download required assets if needed -if exist "%deepStackPath%\%pythonDir%" ( - call :Write White "Checking Python interpreter..." - call :WriteLine Green "Present" -) else ( - call :Download "%storageUrl%" "%deepStackPath%\" python37.zip "%pythonDir%" ^ - "Downloading Python interpreter..." -) -if exist "%deepStackPath%\%deepStackModelsDir%" ( - call :Write White "Checking Module 1 AI Models..." - call :WriteLine Green "Present" -) else ( - call :Download "%storageUrl%" "%deepStackPath%\" models.zip "%deepStackModelsDir%" ^ - "Downloading Module 1 AI Models..." -) -if exist "%yoloNetPath%\%yoloNetModelsDir%" ( - call :Write White "Checking Module 2 AI Models..." - call :WriteLine Green "Present" -) else ( - call :Download "%storageUrl%" "%yoloNetPath%\" yolonet-models.zip "%yoloNetModelsDir%" ^ - "Downloading Module 2 AI Models..." 
-) - -:: ---------------------------------------------------------------------------- -:: Setup Python environment for this module (Requires Python 3.7) - -call :Write White "Creating a Python virtual environment..." - -:: Create the Virtual Environment -"%deepStackPath%\%pythonDir%\python" -m venv "%deepStackPath%\venv" - -:: Activate the Virtual Environment (so we can install packages) -set VIRTUAL_ENV=%deepStackPath%\venv\Scripts - -if not defined PROMPT set PROMPT=$P$G -set PROMPT=(venv) !PROMPT! - -set PYTHONHOME= -set PATH=%VIRTUAL_ENV%;%PATH% -call :WriteLine Green "Done" - -:: Install required packages -if not exist "%deepStackPath%\venv\Lib\site-packages\torch" ( - call :WriteLine White "Installing required Python packages." - - call :Write White " - Installing Python package manager..." - if /i "%verbosity%" == "quiet" ( - python -m pip install --trusted-host pypi.python.org ^ - --trusted-host files.pythonhosted.org ^ - --trusted-host pypi.org --upgrade pip !pipFlags! >nul 2>nul - ) else ( - python -m pip install --trusted-host pypi.python.org ^ - --trusted-host files.pythonhosted.org ^ - --trusted-host pypi.org --upgrade pip !pipFlags! - ) - - call :WriteLine Green "Done" - - REM This is the easy way, but doesn't provide any feedback - REM pip install -r %deepStackPath%\%intelligenceDir%\requirements.txt !pipFlags! - REM call :WriteLine Green "Success" - - REM We'll do this the long way so we can see some progress - set currentOption= - - REM for the odd syntax see https://stackoverflow.com/a/22636725 - REM for /f "tokens=*" %%x in (""%deepStackPath%\IntelligenceLayer\requirements.txt"") do ( - - for /f "tokens=*" %%x in (' more ^< "%deepStackPath%\IntelligenceLayer\requirements.txt" ') do ( - - set line=%%x - - if "!line!" == "" ( - set currentOption= - ) else if "!line:~0,2!" == "##" ( - set currentOption= - ) else if "!line:~0,12!" == "--find-links" ( - set currentOption=!line! - ) else ( - - set module=!line! - for /F "tokens=1,2 delims=#" %%a in ("!line!") do ( - set module=%%a - set description=%%b - ) - - if "!description!" == "" set description=Installing !module! - if "!module!" NEQ "" ( - call :Write White " -!description!..." - - if /i "%verbosity%" == "quiet" ( - python.exe -m pip install !module! !currentOption! !pipFlags! >nul 2>nul - ) else ( - python.exe -m pip install !module! !currentOption! !pipFlags! - ) - - call :WriteLine Green "Done" - ) - - set currentOption= - ) - ) - call :WriteLine Green "All packages installed." -) else ( - call :WriteLine Green "Python packages already present." -) - -:: END OF MODULES - - -:: ---------------------------------------------------------------------------- -:: All done! - -call :WriteLine Green "" -call :WriteLine Green "" -call :Write Green "Setup Complete. Please Run" -call :Write Yellow " Start_SenseAI_Win.bat" -call :WriteLine Green " to start the server." 
-rem start "" ./Welcome.html - - -goto:eof - - - -:: ============================================================================ -:: ============================================================================ - - -:: sub-routines - -:setESC - for /F "tokens=1,2 delims=#" %%a in ('"prompt #$H#$E# & echo on & for %%b in (1) do rem"') do ( - set ESC=%%b - exit /B 0 - ) - exit /B 0 - -:setColor - REM echo %ESC%[4m - Underline - REM echo %ESC%[7m - Inverse - - if /i "%2" == "foreground" ( - REM Foreground Colours - if /i "%1" == "Black" set currentColor=!ESC![30m - if /i "%1" == "DarkRed" set currentColor=!ESC![31m - if /i "%1" == "DarkGreen" set currentColor=!ESC![32m - if /i "%1" == "DarkYellow" set currentColor=!ESC![33m - if /i "%1" == "DarkBlue" set currentColor=!ESC![34m - if /i "%1" == "DarkMagenta" set currentColor=!ESC![35m - if /i "%1" == "DarkCyan" set currentColor=!ESC![36m - if /i "%1" == "Gray" set currentColor=!ESC![37m - if /i "%1" == "DarkGray" set currentColor=!ESC![90m - if /i "%1" == "Red" set currentColor=!ESC![91m - if /i "%1" == "Green" set currentColor=!ESC![92m - if /i "%1" == "Yellow" set currentColor=!ESC![93m - if /i "%1" == "Blue" set currentColor=!ESC![94m - if /i "%1" == "Magenta" set currentColor=!ESC![95m - if /i "%1" == "Cyan" set currentColor=!ESC![96m - if /i "%1" == "White" set currentColor=!ESC![97m - ) else ( - REM Background Colours - if /i "%1" == "Black" set currentColor=!ESC![40m - if /i "%1" == "DarkRed" set currentColor=!ESC![41m - if /i "%1" == "DarkGreen" set currentColor=!ESC![42m - if /i "%1" == "DarkYellow" set currentColor=!ESC![43m - if /i "%1" == "DarkBlue" set currentColor=!ESC![44m - if /i "%1" == "DarkMagenta" set currentColor=!ESC![45m - if /i "%1" == "DarkCyan" set currentColor=!ESC![46m - if /i "%1" == "Gray" set currentColor=!ESC![47m - if /i "%1" == "DarkGray" set currentColor=!ESC![100m - if /i "%1" == "Red" set currentColor=!ESC![101m - if /i "%1" == "Green" set currentColor=!ESC![102m - if /i "%1" == "Yellow" set currentColor=!ESC![103m - if /i "%1" == "Blue" set currentColor=!ESC![104m - if /i "%1" == "Magenta" set currentColor=!ESC![105m - if /i "%1" == "Cyan" set currentColor=!ESC![106m - if /i "%1" == "White" set currentColor=!ESC![107m - ) - exit /B 0 - -:WriteLine - SetLocal EnableDelayedExpansion - set resetColor=!ESC![0m - - set str=%~2 - - if "!str!" == "" ( - Echo: - exit /b 0 - ) - - if /i "%techniColor%" == "true" ( - REM powershell write-host -foregroundcolor %1 !str! - call :setColor %1 foreground - echo !currentColor!!str!!resetColor! - ) else ( - Echo !str! - ) - exit /b 0 - -:Write - - set str=%~2 - - if "!str!" == "" exit /b 0 - - SetLocal EnableDelayedExpansion - set resetColor=!ESC![0m - - if /i "%techniColor%" == "true" ( - REM powershell write-host -foregroundcolor %1 -NoNewline !str! - call :setColor %1 foreground - nul 2>nul - pushd "!dirToSave!" - tar -xf "!dirToSave!.zip" > nul 2>nul - if "%errorlevel%" == "9009" set tarExists=false - rm "!dirToSave!.zip" > nul 2>nul - popd - popd - - if "!tarExists!" == "false" ( - powershell Expand-Archive -Path "!downloadToDir!!dirToSave!.zip" ^ - -DestinationPath "!downloadToDir!" -Force - ) - - del /s /f /q "!downloadToDir!!dirToSave!.zip" > nul - - call :WriteLine Green "Done." 
- - exit /b diff --git a/Installers/Old School/Start_SenseAI_Win.bat b/Installers/Old School/Start_SenseAI_Win.bat deleted file mode 100644 index c9fa6b8c..00000000 --- a/Installers/Old School/Start_SenseAI_Win.bat +++ /dev/null @@ -1,311 +0,0 @@ -:: ============================================================================ -:: -:: CodeProject SenseAI Server Startup script -:: -:: Run this to START CodeProject SenseAI -:: -:: Please ensure you have run Setup_SenseAI_Win.bat before you run this script. -:: Setup_SenseAI_Win will download everything you need and setup the Environment -:: while this script will start up the CodeProject SenseAI API server and -:: backend analysis services. -:: -:: ============================================================================ - -:: CodeProject SenseAI Server startup script -:: We assume we're in the CodeProject SenseAI installed directory. - -@echo off -cls -SETLOCAL EnableDelayedExpansion - -:: Basic Settings - -:: verbosity can be: quiet | info | loud -set verbosity=quiet - -:: The name of the CodeProject Sense App Executable file -set appExe=CodeProject.SenseAI.Server.exe - -:: Can be Debug or Release -set config=Release - -:: The target platform -set platform=net5.0 - -:: The name of the Environment variable setup file -set settingsFile=CodeProject.SenseAI.json - -:: Show output in wild, crazy colours -set techniColor=true - -:: Debug overrides -rem set verbosity=info -rem set config=Debug - - -:: ---------------------------------------------------------------------------- -:: Set Flags - -set dotnetFlags=q -if "%verbosity%"=="info" set dotnetFlags=m -if "%verbosity%"=="loud" set dotnetFlags=n - -if /i "%techniColor%" == "true" call :setESC - -call :WriteLine Yellow "Preparing CodeProject.SenseAI Server" - - -:: Before we start, let's set the root directory. -:: Also setup the required environment variables. -:: Doing the path calculations here so app will run if install directory -:: renamed or moved in addition to running from a non-default directory. - -:: ============================================================================ -:: 1. Load Installation settings - -call :Write White "Loading installation settings..." -( - for /f "tokens=*" %%x in (' more ^< "%settingsFile%" ') do ( - set line=%%x - rem remove quotes, change " : " to "=", remove spaces - set line=!line:"=! - set line=!line: : ==! - set line=!line: =! - if not "!line:~0,1!" == "{" ( - if not "!line:~0,1!" == "}" ( - if "!line:~-1!" == "," set line=!line:~0,-1! - echo set !line! - ) - ) - - ) -) > "!settingsFile!.bat" -call !settingsFile!.bat -del !settingsFile!.bat -call :WriteLine Green "Done" - -:: In case the installation has been moved to a different directory after installation -:: CodeProject SenseAI API Server -set CPSENSEAI_ROOTDIR=%cd% -:: Modules: DeepStack specific -set APPDIR=!CPSENSEAI_ROOTDIR!\!CPSENSEAI_APPDIR!\!CPSENSEAI_ANALYSISDIR!\DeepStack\intelligencelayer -set DATA_DIR=!CPSENSEAI_ROOTDIR!\!CPSENSEAI_APPDIR!\!CPSENSEAI_ANALYSISDIR!\DeepStack\datastore -set TEMP_PATH=!CPSENSEAI_ROOTDIR!\!CPSENSEAI_APPDIR!\!CPSENSEAI_ANALYSISDIR!\DeepStack\tempstore -set MODELS_DIR=!CPSENSEAI_ROOTDIR!\!CPSENSEAI_APPDIR!\!CPSENSEAI_ANALYSISDIR!\DeepStack\assets -:: Modules: CodeProject YOLO -:: ... - - -if "%verbosity%" NEQ "quiet" ( - call :WriteLine Yellow "Environment variable summary" - call :WriteLine Yellow "CodeProject SenseAI" - call :WriteLine DarkGreen " CPSENSEAI_ROOTDIR = !CPSENSEAI_ROOTDIR! 
- call :WriteLine DarkGreen " CPSENSEAI_APPDIR = !CPSENSEAI_APPDIR! - call :WriteLine DarkGreen " CPSENSEAI_APIDIR = !CPSENSEAI_APIDIR! - call :WriteLine DarkGreen " CPSENSEAI_ANALYSISDIR = !CPSENSEAI_ANALYSISDIR! - call :WriteLine DarkGreen " CPSENSEAI_PORT = !PORT! - call :WriteLine DarkGreen " CPSENSEAI_PROFILE = !PROFILE! - call :WriteLine DarkGreen " CPSENSEAI_PRODUCTION = !CPSENSEAI_PRODUCTION! - call :WriteLine DarkGreen " CPSENSEAI_CONFIG = !CPSENSEAI_CONFIG! - call :WriteLine DarkGreen " CPSENSEAI_BUILDSERVER = !CPSENSEAI_BUILDSERVER! - call :WriteLine Yellow "Module: DeepStack" - call :WriteLine DarkGreen " APPDIR = !APPDIR!" - call :WriteLine DarkGreen " PROFILE = !PROFILE!" - call :WriteLine DarkGreen " CUDA_MODE = !CUDA_MODE!" - call :WriteLine DarkGreen " DATA_DIR = !DATA_DIR!" - call :WriteLine DarkGreen " TEMP_PATH = !TEMP_PATH!" - call :WriteLine DarkGreen " PORT = !PORT!" - REM call :WriteLine Yellow "Module: CodeProject YOLO" - REM call :WriteLine DarkGreen ... -) - -:: ============================================================================ -:: 2. Activate Virtual Environment - -call :Write White "Enabling our Virtual Environment..." - -set deepstackDir=!CPSENSEAI_ROOTDIR!\!CPSENSEAI_APPDIR!\!CPSENSEAI_ANALYSISDIR!\DeepStack - -:: Rewrite the pyvenv.cfg to point to the correct, absolute, python directory. -:: This may have changed if this folder has been moved. See also the discussion -:: https://bugs.python.org/issue39469 - Support for relative home path in pyvenv -( -echo home = !deepstackDir!\python37 -echo include-system-site-packages = false -echo version = 3.7.9 -) > "!deepstackDir!\venv\pyvenv.cfg" - -:: Activate the Virtual Environment -set VIRTUAL_ENV=%deepstackDir%\venv\Scripts - -if not defined PROMPT set PROMPT=$P$G -set PROMPT=(venv) !PROMPT! - -set PYTHONHOME= -set PATH=!VIRTUAL_ENV!;%PATH% - -if errorlevel 1 goto errorNoPythonVenv -call :WriteLine Green "Done" - -:: Ensure Python Exists -call :Write White "Checking for Python 3.7..." -python --version | find "3.7" > NUL -if errorlevel 1 goto errorNoPython -call :WriteLine Green "present" - -if "%verbosity%"=="loud" where Python.exe - -:: ============================================================================ -:: 3. Start front end server - -:: In an installed, Production version of SenseAI, the server exe sits directly -:: in the /src/Server/Frontend folder. For the development environment the -:: server exe is in /src/Server/Frontend/bin/Debug/... folder. Hence we need to -:: update the location of the main executable. - -cd !CPSENSEAI_ROOTDIR!\!CPSENSEAI_APPDIR!\!CPSENSEAI_APIDIR!\Server\FrontEnd - -if /i "!CPSENSEAI_PRODUCTION!" == "true" ( - - set CPSENSEAI_BUILDSERVER=False - Set CPSENSEAI_CONFIG=Release - -) else ( - - if /i "!CPSENSEAI_BUILDSERVER!" == "true" ( - - cd "!appFolder!" > nul - - if "%verbosity%"=="quiet" ( - dotnet build --configuration !CPSENSEAI_CONFIG! --nologo --verbosity !dotnetFlags! > nul - ) else ( - dotnet build --configuration !CPSENSEAI_CONFIG! --nologo --verbosity !dotnetFlags! - ) - - REM Head down to the dev version of the exe. - set appExe=bin\!platform!\!CPSENSEAI_CONFIG!\!platform!\win-x86\!appExe! - ) -) - -call :WriteLine Yellow "Launching CodeProject.SenseAI Server" - -"!appExe!" --urls http://*:%port% - -:: Pause and let backend services catch up (to be controlled via messages soon) -if "%startPythonDirectly %" == "true" Timeout /T 5 /NOBREAK >nul 2>nul - -call :WriteLine Green "CodeProject.SenseAI Server is now live" - - -:: and we're done. 
-goto:eof - - -:: sub-routines - -:setESC - for /F "tokens=1,2 delims=#" %%a in ('"prompt #$H#$E# & echo on & for %%b in (1) do rem"') do ( - set ESC=%%b - exit /B 0 - ) - exit /B 0 - -:setColor - REM echo %ESC%[4m - Underline - REM echo %ESC%[7m - Inverse - - if /i "%2" == "foreground" ( - REM Foreground Colours - if /i "%1" == "Black" set currentColor=!ESC![30m - if /i "%1" == "DarkRed" set currentColor=!ESC![31m - if /i "%1" == "DarkGreen" set currentColor=!ESC![32m - if /i "%1" == "DarkYellow" set currentColor=!ESC![33m - if /i "%1" == "DarkBlue" set currentColor=!ESC![34m - if /i "%1" == "DarkMagenta" set currentColor=!ESC![35m - if /i "%1" == "DarkCyan" set currentColor=!ESC![36m - if /i "%1" == "Gray" set currentColor=!ESC![37m - if /i "%1" == "DarkGray" set currentColor=!ESC![90m - if /i "%1" == "Red" set currentColor=!ESC![91m - if /i "%1" == "Green" set currentColor=!ESC![92m - if /i "%1" == "Yellow" set currentColor=!ESC![93m - if /i "%1" == "Blue" set currentColor=!ESC![94m - if /i "%1" == "Magenta" set currentColor=!ESC![95m - if /i "%1" == "Cyan" set currentColor=!ESC![96m - if /i "%1" == "White" set currentColor=!ESC![97m - ) else ( - REM Background Colours - if /i "%1" == "Black" set currentColor=!ESC![40m - if /i "%1" == "DarkRed" set currentColor=!ESC![41m - if /i "%1" == "DarkGreen" set currentColor=!ESC![42m - if /i "%1" == "DarkYellow" set currentColor=!ESC![43m - if /i "%1" == "DarkBlue" set currentColor=!ESC![44m - if /i "%1" == "DarkMagenta" set currentColor=!ESC![45m - if /i "%1" == "DarkCyan" set currentColor=!ESC![46m - if /i "%1" == "Gray" set currentColor=!ESC![47m - if /i "%1" == "DarkGray" set currentColor=!ESC![100m - if /i "%1" == "Red" set currentColor=!ESC![101m - if /i "%1" == "Green" set currentColor=!ESC![102m - if /i "%1" == "Yellow" set currentColor=!ESC![103m - if /i "%1" == "Blue" set currentColor=!ESC![104m - if /i "%1" == "Magenta" set currentColor=!ESC![105m - if /i "%1" == "Cyan" set currentColor=!ESC![106m - if /i "%1" == "White" set currentColor=!ESC![107m - ) - exit /B 0 - -:WriteLine - SetLocal EnableDelayedExpansion - set resetColor=!ESC![0m - - set str=%~2 - - if "!str!" == "" ( - Echo: - exit /b 0 - ) - - if /i "%techniColor%" == "true" ( - REM powershell write-host -foregroundcolor %1 !str! - call :setColor %1 foreground - echo !currentColor!!str!!resetColor! - ) else ( - Echo !str! - ) - exit /b 0 - -:Write - - set str=%~2 - - if "!str!" == "" exit /b 0 - - SetLocal EnableDelayedExpansion - set resetColor=!ESC![0m - - if /i "%techniColor%" == "true" ( - REM powershell write-host -foregroundcolor %1 -NoNewline !str! - call :setColor %1 foreground - ..zip -set installationPackage=c:\CodeProject.SenseAI.0.0201.zip - -:: The location of the solution root directory relative to this script -cd .. -set rootPath=%cd% - -:: The location of the API Frontend Server within the source tree itself. Note that the location of -:: the API server in the installation -set serverSrcPath=%rootPath%\src\API\Server\FrontEnd - -:: Whether or not to remove any existing installation directory -set cleanInstall=true - -:: Whether or not to compress the final installation package. 
Currently fails -:: due to access denied error -set compressInstallation=true - -:: Whether or not to remove the installation directory after it's been compressed -set removeInstallationFolder=true - -:: verbosity can be: quiet | info | loud -set verbosity=quiet - -:: Show output in wild, crazy colours -set techniColor=true - -:: SenseAI specific ::::::::::::::::::::::::::::::::::::::::::::::::::::::::::: - -:: The name of the installation dir holding the frontend API server -set senseAPIDir=API - -:: The name of the startup settings file -set settingsFile=CodeProject.SenseAI.json - -:: The name of the version file for the SenseAI system. Stored in the API Server dir. -:: TODO: Read this file to set the value of %installationPackage% -set versionFile=version.json - -:: .NET build configuration: [Debug | Release] -set config=Release - -:: Where to put the Builds -set buildOutputDir=bin\InstallPackage - -:: DeepStack specific ::::::::::::::::::::::::::::::::::::::::::::::::::::::::: - -:: The name of the dir holding the DeepStack analysis services -set deepstackDir=DeepStack - -:: The name of the dir containing the Python code itself -set intelligenceDir=intelligencelayer - -:: The name of the dir containing the AI models themselves -set modelsDir=assets - -:: The name of the dir containing persisted DeepStack data -set datastoreDir=datastore - -:: The name of the dir containing temporary DeepStack data -set tempstoreDir=tempstore - -:: Yolo.Net specific -:: The location of the Yolo.Net Module within the source tree itself. -set yoloNetSrcPath=%rootPath%\src\AnalysisLayer\CodeProject.SenseAI.AnalysisLayer.Yolo -set yoloNetDir=CodeProject.SenseAI.AnalysisLayer.Yolo -set yoloModelsDir=assets\weights - -:: Shared ::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::: - -:: The name of the dir, within the current directory, where install assets will -:: be downloaded -set downloadDir=downloads - -:: The name of the dir containing the Python interpreter -set pythonDir=python37 - -:: The name of the installation source directory -set srcDir=src - -:: The name of the dir holding the backend analysis services -set analysisLayerDir=AnalysisLayer - -:: The name of the demos directory -set demoDir=demos - -:: For Debug -rem set cleanInstall=false -rem set compressInstallation=false -rem set verbosity=info - -:: ---------------------------------------------------------------------------- -:: Set Flags and misc. - -set roboCopyFlags=/NFL /NDL /NJH /NJS /nc /ns >nul 2>nul -set dotnetFlags=q - -if "%verbosity%"=="info" set dotnetFlags=m -if "%verbosity%"=="loud" set roboCopyFlags=/NFL /NDL /nc /ns -if "%verbosity%"=="loud" set dotnetFlags=n - -if /i "%techniColor%" == "true" call :setESC - -:: ---------------------------------------------------------------------------- -:: 1. Ensure correct permissions before we start - -:: Only need elevated permissions if we are compressing the install -if /i "%compressInstallation%" NEQ "true" goto hasCorrectPermissions - -:: We're compressing, so let's ensure we have permissions to do this -call :Write DarkYellow "Checking administrative privileges..." - -set hasAdmin=true -for /f "tokens=*" %%O in ('FSUTIL dirty query %SystemDrive% 2^> nul') do ( - set line=%%O - if "!line:denied=!" NEQ "!line!" set hasAdmin=false -) -if /i "!hasAdmin!" == "true" goto hasCorrectPermissions - -:: ---------------------------------------------------------------------------- -:: We don't have permissions needed. 
We can try and restart this script with -:: correct permissions, or we can just tell the user to restart in admin mode. -:: The latter is way less hassle. - -set attemptRestart=false - -if /i "%attemptRestart%" == "true" ( - REM Get the details of this script so we can relaunch it - Set _batchFile=%~f0 - Set _Args=%* - - REM remove any quotes and add them back later (VBScript needs quotes doubled) - Set _batchFile=!_batchFile:"=! - - REM Create and run a temporary VBScript to elevate this batch file - del /s /f /q "%temp%\~ElevateMe.vbs" > nul - ( - Echo Dim UAC : Set UAC = CreateObject^("Shell.Application"^) - Echo UAC.ShellExecute "cmd", "/c ""!_batchFile! !_Args!"" ", "", "runas", 1 - ) > "%temp%\~ElevateMe.vbs" - more "%temp%\~ElevateMe.vbs" - cscript "%temp%\~ElevateMe.vbs" - Exit /B -) else ( - call :WriteLine Red "Insufficient privliges" - call :WriteLine DarkYellow "To compress the installation package please restart this script in admin mode" - goto:eof -) - -:hasCorrectPermissions -call :WriteLine Green "Success" - -:: ============================================================================ -:: We've hit the point where we need to consider the .NET version installed - -call :Write White "Checking version of .NET..." -dotnet --info | find "Version: 6" > NUL -if errorlevel 1 goto errorNoNET6 -call :WriteLine Green "NET 6 present" - - -:: ============================================================================ -:: 2. Do the heavy lifting of ensuring the dev environment is setup so we can -:: just do a bunch of copies - -pushd install -call setup_dev_env_win.bat %techniColor% -popd -if errorlevel 1 goto:eof - -call :WriteLine Yellow "Creating CodeProject.SenseAI Installation Package" - - -:: ============================================================================ -:: 3. Remove the install folder if it exists - -if /i "!cleanInstall!" == "true" ( - call :Write White "Ensuring the installation folder is reset and clean..." - if exist "%installationDir%" rd "%installationDir%" /S /Q > NUL - call :WriteLine Green "Done." -) - -:: ============================================================================ -:: 4. Ensure directories are created - -:: Create some directories -call :Write White "Creating Directories..." - -if not exist "%installationDir%" mkdir "%installationDir%" -if not exist "%installationDir%\%srcDir%\%analysisLayerDir%" mkdir "%installationDir%\%srcDir%\%analysisLayerDir%" - -:: For CodeProject.SenseAI -set senseAIInstallPath=%installationDir%\%srcDir%\%senseAPIDir% -if not exist "%senseAIInstallPath%" mkdir "%senseAIInstallPath%" - -:: For DeepStack -:: DeepStack gets copied over in one fell swoop. No need to create dirs - -call :WriteLine Green "Done" - - -:: ============================================================================ -:: 5. Copy over the server code, models and virtual environment - -:: For CodeProject.SenseAI - -:: Build server -call :Write White "Building API Server [%config%]..." - -pushd "%serverSrcPath%" -REM Note: The carrot character means "line continuation" -REM For .NET 6 add the "-p:PublishSingleFile=true" parameter to make the output a single file -if /i "%verbosity%"=="quiet" ( - dotnet publish --configuration %config% -p:PublishProfile=WinInstallPackage ^ - -o "%buildOutputDir%" --nologo --verbosity !dotnetFlags! > nul -) else ( - dotnet publish --configuration %config% -p:PublishProfile=WinInstallPackage ^ - -o "%buildOutputDir%" --nologo --verbosity !dotnetFlags! -) -popd -call :WriteLine Green "Done." 
- -:: Copy over -call :Write White "Moving API Server to installation folder..." -if /i "%verbosity%"=="quiet" ( - robocopy /E "%serverSrcPath%\%buildOutputDir% " ^ - "%installationDir%\%srcDir%\%senseAPIDir%\Server\FrontEnd " ^ - /XF *.pdb !roboCopyFlags! > nul -) else ( - robocopy /E "%serverSrcPath%\%buildOutputDir% " ^ - "%installationDir%\%srcDir%\%senseAPIDir%\Server\FrontEnd " ^ - /XF *.pdb !roboCopyFlags! -) -call :WriteLine Green "Done." - -call :Write White "Moving Analysis services to installation folder..." - -:: For DeepStack - -:: Note that Python and the models will be downloaded by the Setup script, and -:: venv will be generated dynamically by the same script -set analysisPath=%rootPath%\%srcDir%\%analysisLayerDir%\DeepStack\ -REM Note the space before the closing " https://stackoverflow.com/a/30244061 -if /i "%verbosity%"=="quiet" ( - robocopy "%analysisPath% " "%installationDir%\%srcDir%\%analysisLayerDir%\DeepStack " ^ - /XD venv __pycache__ "%modelsDir%" "%pythonDir%" /XF faceembedding.db /E !roboCopyFlags! > nul -) else ( - robocopy "%analysisPath% " "%installationDir%\%srcDir%\%analysisLayerDir%\DeepStack " ^ - /XD venv __pycache__ "%modelsDir%" "%pythonDir%" /XF faceembedding.db /E !roboCopyFlags! -) - -call :WriteLine Green "Done." - -:: For YOLO -call :Write White "Building Yolo.Net module [%config%]..." - -pushd "%yoloNetSrcPath%" -REM Note: The carrot character means "line continuation" -REM For .NET 6 add the "-p:PublishSingleFile=true" parameter to make the output a single file -if /i "%verbosity%"=="quiet" ( - dotnet publish --configuration %config% -p:PublishProfile=WinInstaller ^ - -o "%buildOutputDir%" --nologo --verbosity !dotnetFlags! > nul -) else ( - dotnet publish --configuration %config% -p:PublishProfile=WinInstaller ^ - -o "%buildOutputDir%" --nologo --verbosity !dotnetFlags! -) -popd -call :WriteLine Green "Done." - -:: Copy over -call :Write White "Moving Yolo.Net module to installation folder..." -REM there is a bug in NuGet that make .pdb files in native code manditory. -REM looking for a fix. In the mean time have to include the .pdb files -if /i "%verbosity%"=="quiet" ( - robocopy /E "%yoloNetSrcPath%\%buildOutputDir% " ^ - "%installationDir%\%srcDir%\%analysisLayerDir%\%yoloNetDir% " ^ - !roboCopyFlags! > nul -) else ( - robocopy /E "%yoloNetSrcPath%\%buildOutputDir% " ^ - "%installationDir%\%srcDir%\%analysisLayerDir%\%yoloNetDir% " ^ - !roboCopyFlags! -) -call :WriteLine Green "Done." - -:: Copy over the setup and startup scripts - -call :Write White "Copying over startup files..." -pushd "%rootPath%\install" -copy /Y "Setup_SenseAI_Win.bat" "!installationDir!" >nul 2>nul -copy /Y "Start_SenseAI_Win.bat" "!installationDir!" >nul 2>nul -copy /Y "Start_With_SenseAI_Detection.bat" "!installationDir!" >nul 2>nul -copy /Y "..\docs\Welcome.html" "!installationDir!" >nul 2>nul -popd -call :WriteLine Green "Done." - -call :Write White "Reloading base installation settings..." -( - for /f "tokens=*" %%x in (' more ^< "!rootPath!\%settingsFile%" ') do ( - set line=%%x - rem remove quotes, change " : " to "=", remove spaces - set line=!line:"=! - set line=!line: : ==! - set line=!line: =! - if not "!line:~0,1!" == "{" ( - if not "!line:~0,1!" == "}" ( - if "!line:~-1!" == "," set line=!line:~0,-1! - echo set !line! - ) - ) - ) -) > "!rootPath!\!settingsFile!.bat" -call !rootPath!\!settingsFile!.bat -del !rootPath!\!settingsFile!.bat -call :WriteLine Green "Done" - -call :Write White "Updating installation Environment variables..." 
- -:: For CodeProject.SenseAI -set CPSENSEAI_ROOTDIR=!installationDir! -set CPSENSEAI_APPDIR=!srcDir! -set CPSENSEAI_APIDIR=!senseAPIDir! -set CPSENSEAI_ANALYSISDIR=!analysisLayerDir! -set CPSENSEAI_CONFIG=Release -set CPSENSEAI_BUILDSERVER=False -set CPSENSEAI_PRODUCTION=True - -:: For DeepStack -set APPDIR=%CPSENSEAI_ROOTDIR%\%srcDir%\%analysisLayerDir%\%deepstackDir%\%intelligenceDir% -set MODELS_DIR=%CPSENSEAI_ROOTDIR%\%srcDir%\%analysisLayerDir%\%deepstackDir%\%modelsDir% -set DATA_DIR=%CPSENSEAI_ROOTDIR%\%srcDir%\%analysisLayerDir%\%deepstackDir%\%datastoreDir% -set TEMP_PATH=%CPSENSEAI_ROOTDIR%\%srcDir%\%analysisLayerDir%\%deepstackDir%\%tempstoreDir% -set PORT=5000 - -pushd install -call save_environment !installationDir!\!settingsFile! -popd -call :WriteLine Green "Done." - - -:: ============================================================================ -:: 6. Prepare the demos - -:: Do we need this given the HTML version has feature parity? -set includeDotnetDemo=false - -if /i "%includeDotnetDemo%" == "true" ( - REM Build .NET demo - call :Write White "Building .NET demo [%config%] ..." - pushd "%rootPath%\%demoDir%\dotNET\CodeProject.SenseAI.Playground" - - if /i "%verbosity%"=="quiet" ( - dotnet publish --configuration %config% -o "%buildOutputDir%" ^ - -p:PublishSingleFile=true --nologo !dotnetFlags! > nul - ) else ( - dotnet publish --configuration %config% -o "%buildOutputDir%" ^ - -p:PublishSingleFile=true --nologo !dotnetFlags! - ) - popd - call :WriteLine Green "Done." -) - -:: Copy demos - -call :Write White "Coping demos to installation..." - -if not exist "%installationDir%\%demoDir%" mkdir "%installationDir%\%demoDir%" > nul -pushd "%installationDir%\%demoDir%" - -if /i "%includeDotnetDemo%" == "true" ( - if not exist Playground mkdir Playground - robocopy /e "%rootPath%\%demoDir%\dotNET\CodeProject.SenseAI.Playground\%buildOutputDir% " ^ - Playground /XF *.pdb !roboCopyFlags! > nul -) - -if not exist Javascript mkdir Javascript -robocopy /e "%rootPath%\%demoDir%\Javascript " Javascript !roboCopyFlags! > nul - -call :WriteLine Green "Done." - - -:: Copy test data - -call :Write White "Coping test data to installation..." -if not exist TestData mkdir TestData -robocopy /e "%rootPath%\%demoDir%\TestData" ^ - "%installationDir%\%demoDir%\TestData" !roboCopyFlags! > nul - -call :WriteLine Green "Done." - -popd - -:: ============================================================================ -:: 7. Compress the final package if required -if /i "%compressInstallation%" == "true" ( - - call :WriteLine White "Compressing installation package..." - if exist "%installationPackage%" del "%installationPackage%" > nul 2>nul - - REM Try tar first. If that doesn't work, fall back to pwershell (slow) - set tarExists=true - - if /i "%verbosity%"=="quiet" ( - tar -caf "%installationPackage%" --cd "%installationDir%" *> nul 2>nul - ) else ( - tar -cvaf "%installationPackage%" --cd "%installationDir%" *" - ) - - if "%errorlevel%" == "9009" set tarExists=false - - if "!tarExists!" == "false" ( - powershell Compress-Archive -Force -Path "%installationDir%\*" ^ - -DestinationPath "%installationPackage%" ^ - -CompressionLevel Optimal - ) - - if ErrorLevel 0 ( - if exist "%installationPackage%" ( - if /i "%removeInstallationFolder%" == "true" ( - call :Write White "Removing installation folder..." 
- rmdir "%installationDir%" /s /q > nul - rem del /s /f /q "%installationDir%" > nul - call :WriteLine Green "Done" - ) - ) - ) -) - - -:: ============================================================================ -:: and we're done. - - -call :WriteLine Yellow "Installation folder creation complete" -call :WriteLine White "" -call :WriteLine White "" - -goto:eof - - - -:: ============================================================================ -:: ============================================================================ - -:: sub-routines - -:setESC - for /F "tokens=1,2 delims=#" %%a in ('"prompt #$H#$E# & echo on & for %%b in (1) do rem"') do ( - set ESC=%%b - exit /B 0 - ) - exit /B 0 - -:setColor - REM echo %ESC%[4m - Underline - REM echo %ESC%[7m - Inverse - - if /i "%2" == "foreground" ( - REM Foreground Colours - if /i "%1" == "Black" set currentColor=!ESC![30m - if /i "%1" == "DarkRed" set currentColor=!ESC![31m - if /i "%1" == "DarkGreen" set currentColor=!ESC![32m - if /i "%1" == "DarkYellow" set currentColor=!ESC![33m - if /i "%1" == "DarkBlue" set currentColor=!ESC![34m - if /i "%1" == "DarkMagenta" set currentColor=!ESC![35m - if /i "%1" == "DarkCyan" set currentColor=!ESC![36m - if /i "%1" == "Gray" set currentColor=!ESC![37m - if /i "%1" == "DarkGray" set currentColor=!ESC![90m - if /i "%1" == "Red" set currentColor=!ESC![91m - if /i "%1" == "Green" set currentColor=!ESC![92m - if /i "%1" == "Yellow" set currentColor=!ESC![93m - if /i "%1" == "Blue" set currentColor=!ESC![94m - if /i "%1" == "Magenta" set currentColor=!ESC![95m - if /i "%1" == "Cyan" set currentColor=!ESC![96m - if /i "%1" == "White" set currentColor=!ESC![97m - ) else ( - REM Background Colours - if /i "%1" == "Black" set currentColor=!ESC![40m - if /i "%1" == "DarkRed" set currentColor=!ESC![41m - if /i "%1" == "DarkGreen" set currentColor=!ESC![42m - if /i "%1" == "DarkYellow" set currentColor=!ESC![43m - if /i "%1" == "DarkBlue" set currentColor=!ESC![44m - if /i "%1" == "DarkMagenta" set currentColor=!ESC![45m - if /i "%1" == "DarkCyan" set currentColor=!ESC![46m - if /i "%1" == "Gray" set currentColor=!ESC![47m - if /i "%1" == "DarkGray" set currentColor=!ESC![100m - if /i "%1" == "Red" set currentColor=!ESC![101m - if /i "%1" == "Green" set currentColor=!ESC![102m - if /i "%1" == "Yellow" set currentColor=!ESC![103m - if /i "%1" == "Blue" set currentColor=!ESC![104m - if /i "%1" == "Magenta" set currentColor=!ESC![105m - if /i "%1" == "Cyan" set currentColor=!ESC![106m - if /i "%1" == "White" set currentColor=!ESC![107m - ) - exit /B 0 - -:WriteLine - SetLocal EnableDelayedExpansion - set resetColor=!ESC![0m - - if "%~2" == "" ( - Echo: - exit /b 0 - ) - - if /i "%techniColor%" == "true" ( - REM powershell write-host -foregroundcolor %1 %~2 - call :setColor %1 foreground - echo !currentColor!%~2!resetColor! 
- ) else ( - Echo %~2 - ) - exit /b 0 - -:Write - SetLocal EnableDelayedExpansion - set resetColor=!ESC![0m - - if /i "%techniColor%" == "true" ( - REM powershell write-host -foregroundcolor %1 -NoNewline %~2 - call :setColor %1 foreground - "!envVariablesFile!.bat" -call !envVariablesFile!.bat -del !envVariablesFile!.bat - -goto:eof - -:errorNoSettingsFile -Echo: -Echo: -Echo --------------------------------------------------------------------------- -Echo Error: %settingsFile% settings file not found -Echo Ensure you have run setup_dev_env_win.bat before running this script \ No newline at end of file diff --git a/Installers/Old School/save_environment.bat b/Installers/Old School/save_environment.bat deleted file mode 100644 index 151bcc8f..00000000 --- a/Installers/Old School/save_environment.bat +++ /dev/null @@ -1,39 +0,0 @@ -:: =============================================================================================== -:: -:: CodeProject SenseAI Server script to loasaved environment variables to a config file -:: -:: Copyright CodeProject 2021 -:: -:: =============================================================================================== - - -@echo off -SETLOCAL EnableDelayedExpansion - -set envConfigFile=CodeProject.SenseAI.json -if not "%1" == "" set envConfigFile=%1 -set envConfigFile=!envConfigFile:"=! - -( -echo { -REM SenseAI Application values -echo "CPSENSEAI_ROOTDIR" : "!CPSENSEAI_ROOTDIR!", -echo "CPSENSEAI_APPDIR" : "!CPSENSEAI_APPDIR!", -echo "CPSENSEAI_APIDIR" : "!CPSENSEAI_APIDIR!", -echo "CPSENSEAI_ANALYSISDIR" : "!CPSENSEAI_ANALYSISDIR!", -echo "CPSENSEAI_PORT" : "!PORT!", -echo "CPSENSEAI_PROFILE" : "!PROFILE!", -echo "CPSENSEAI_PRODUCTION" : "!CPSENSEAI_PRODUCTION!", -echo "CPSENSEAI_CONFIG" : "!CPSENSEAI_CONFIG!", -echo "CPSENSEAI_BUILDSERVER" : "!CPSENSEAI_BUILDSERVER!", - -REM DeepStack compatible values -echo "APPDIR" : "!APPDIR!", -echo "PROFILE" : "!PROFILE!", -echo "CUDA_MODE" : "!CUDA_MODE!", -echo "DATA_DIR" : "!DATA_DIR!", -echo "TEMP_PATH" : "!TEMP_PATH!", -echo "MODELS_DIR" : "!MODELS_DIR!", -echo "PORT" : "!PORT!", -echo } -) > "!envConfigFile!" \ No newline at end of file diff --git a/Installers/Old School/set_environment.sh b/Installers/Old School/set_environment.sh deleted file mode 100644 index a72e1c16..00000000 --- a/Installers/Old School/set_environment.sh +++ /dev/null @@ -1,15 +0,0 @@ -#!/bin/sh - -## If you wish to have a single environment variable file for sharing between -## operating systems then this script will read the Windows .bat version and -## export the variables within a *nix shall -## -## Usage: -## . 
./set_environment.sh - -unamestr=$(uname) -if [ "$unamestr" = 'Linux' ]; then - export $(grep -v '^REM' set_environment.bat | xargs -d '\n') -elif [ "$unamestr" = 'FreeBSD' ]; then - export $(grep -v '^REM' set_environment.bat | xargs -0) -fi \ No newline at end of file diff --git a/Installers/Windows/BackgroundRemover.Installer/BackgroundRemover.Installer.wixproj b/Installers/Windows/BackgroundRemover.Installer/BackgroundRemover.Installer.wixproj new file mode 100644 index 00000000..6d973e08 --- /dev/null +++ b/Installers/Windows/BackgroundRemover.Installer/BackgroundRemover.Installer.wixproj @@ -0,0 +1,52 @@ + + + + Debug + x86 + 3.10 + 7e70c376-7119-4c90-ac30-344eea29594e + 2.0 + BackgroundRemover.Installer-1.3.0 + Package + + + bin\$(Configuration)\ + obj\$(Configuration)\ + Debug + -arch x64 + + + bin\$(Configuration)\ + obj\$(Configuration)\ + -arch x64 + True + + + + + + + + $(WixExtDir)\WixUtilExtension.dll + WixUtilExtension + + + $(WixExtDir)\WixUIExtension.dll + WixUIExtension + + + + + + + + + ModelsHarvestPath=..\..\..\src\AnalysisLayer\BackgroundRemover + + + + + + + + \ No newline at end of file diff --git a/Installers/Windows/BackgroundRemover.Installer/BackgroundRemoverFiles.wxs b/Installers/Windows/BackgroundRemover.Installer/BackgroundRemoverFiles.wxs new file mode 100644 index 00000000..8bb32200 --- /dev/null +++ b/Installers/Windows/BackgroundRemover.Installer/BackgroundRemoverFiles.wxs @@ -0,0 +1,105 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/Installers/Windows/BackgroundRemover.Installer/Product.wxs b/Installers/Windows/BackgroundRemover.Installer/Product.wxs new file mode 100644 index 00000000..541b953f --- /dev/null +++ b/Installers/Windows/BackgroundRemover.Installer/Product.wxs @@ -0,0 +1,42 @@ + + + + + + + + + + + + + + + + + + + + + + NOT Installed + + + + Installing required Python Packages ... (This will take several minutes) + + + + + \ No newline at end of file diff --git a/Installers/Windows/DeepStack.Installer/DeepStack.Installer.wixproj b/Installers/Windows/DeepStack.Installer/DeepStack.Installer.wixproj index cb8e1b3e..572ad7ee 100644 --- a/Installers/Windows/DeepStack.Installer/DeepStack.Installer.wixproj +++ b/Installers/Windows/DeepStack.Installer/DeepStack.Installer.wixproj @@ -6,7 +6,7 @@ 3.10 ed492608-6013-4552-a29b-a7e14f4beb0b 2.0 - CodeProject.SenseAI.Legacy-1.2.1 + CodeProject.SenseAI.Legacy-1.3.0 Package @@ -45,11 +45,11 @@ Other similar extension points exist, see Wix.targets. 
--> - ModelsHarvestPath=..\..\..\src\AnalysisLayer\DeepStack\intelligenceLayer + ModelsHarvestPath=..\..\..\src\AnalysisLayer\DeepStack - + diff --git a/Installers/Windows/DeepStack.Installer/DeepStackFiles.wxs b/Installers/Windows/DeepStack.Installer/DeepStackFiles.wxs index c7ce3c47..f17203be 100644 --- a/Installers/Windows/DeepStack.Installer/DeepStackFiles.wxs +++ b/Installers/Windows/DeepStack.Installer/DeepStackFiles.wxs @@ -1,189 +1,251 @@  - - - - - - + + + + - - + + - - + + - - + + - - + + - - + + - - + + - - + + - - + + - - + + - - + + - - + + - - + + - - + + - - + + - - + + - - + + - - + + - - + + - - + + - - + + - - + + - - + + - - + + - - + + - - + + - - + + - - + + - - + + - - + + - - + + - - + + - - + + - - + + - - + + - - + + - - + + - - + + - - + + - - + + - - + + - - + + - - + + - - + + - - + + - - + + - - + + - - + + - - + + - - + + - - + + - - + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + - - + + + + + + + + + + + + + + + + + + + + + + + + + + + - - + + - - + + - - + + \ No newline at end of file diff --git a/Installers/Windows/DeepStackModels.Installer/DeepStackModels.Installer.wixproj b/Installers/Windows/DeepStackModels.Installer/DeepStackModels.Installer.wixproj index b112605f..c8a43f85 100644 --- a/Installers/Windows/DeepStackModels.Installer/DeepStackModels.Installer.wixproj +++ b/Installers/Windows/DeepStackModels.Installer/DeepStackModels.Installer.wixproj @@ -6,7 +6,7 @@ 3.10 d0b74be9-8195-4907-aeee-45631e37251f 2.0 - CodeProject.SenseAI.LegacyModels-1.2.1 + CodeProject.SenseAI.LegacyModels-1.3.0 Package diff --git a/Installers/Windows/PortraitFilter.Installer/PortraitFilter.Installer.wixproj b/Installers/Windows/PortraitFilter.Installer/PortraitFilter.Installer.wixproj new file mode 100644 index 00000000..7351de51 --- /dev/null +++ b/Installers/Windows/PortraitFilter.Installer/PortraitFilter.Installer.wixproj @@ -0,0 +1,62 @@ + + + + Debug + x86 + 3.10 + 476475ce-3c81-446b-914e-ac472d41b82a + 2.0 + PortraitFilter.Installer-1.3.0 + Package + + + bin\$(Configuration)\ + obj\$(Configuration)\ + Debug + -arch x64 + + + bin\$(Configuration)\ + obj\$(Configuration)\ + -arch x64 + True + + + + + + + + PortraitFilter + {ab47cc99-15ed-4928-a2f2-dc8f83da9482} + True + True + Binaries;Content;Satellites + INSTALLFOLDER + + + + + $(WixExtDir)\WixUtilExtension.dll + WixUtilExtension + + + $(WixExtDir)\WixUIExtension.dll + WixUIExtension + + + + + + + + + + + + + + \ No newline at end of file diff --git a/Installers/Windows/PortraitFilter.Installer/PortraitFilterInstallFiles.wxs b/Installers/Windows/PortraitFilter.Installer/PortraitFilterInstallFiles.wxs new file mode 100644 index 00000000..8422a174 --- /dev/null +++ b/Installers/Windows/PortraitFilter.Installer/PortraitFilterInstallFiles.wxs @@ -0,0 +1,334 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git 
a/Installers/Windows/PortraitFilter.Installer/Product.wxs b/Installers/Windows/PortraitFilter.Installer/Product.wxs new file mode 100644 index 00000000..5417ad54 --- /dev/null +++ b/Installers/Windows/PortraitFilter.Installer/Product.wxs @@ -0,0 +1,24 @@ + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/Installers/Windows/Python37.Installer/Product.wxs b/Installers/Windows/Python37.Installer/Product.wxs index f261f4da..3b7ce7ee 100644 --- a/Installers/Windows/Python37.Installer/Product.wxs +++ b/Installers/Windows/Python37.Installer/Product.wxs @@ -17,7 +17,9 @@ - + + + 3.10 3f34eaee-8a53-40d1-8cdb-a13ae728fa06 2.0 - CodeProject.SenseAI.Python37-1.2.1 + CodeProject.SenseAI.Python37-1.3.0 Package @@ -24,6 +24,7 @@ + @@ -41,11 +42,12 @@ Other similar extension points exist, see Wix.targets. --> - ModelsHarvestPath=..\..\..\installers\downloads\windows\Python37 + ModelsHarvestPath=..\..\..\installers\downloads\windows\Python37;PythonSdkHarvestPath=..\..\..\src\AnalysisLayer\SDK\Python - + + diff --git a/Installers/Windows/Python37.Installer/Python37Files.wxs b/Installers/Windows/Python37.Installer/Python37Files.wxs index 5177339c..22aadf74 100644 --- a/Installers/Windows/Python37.Installer/Python37Files.wxs +++ b/Installers/Windows/Python37.Installer/Python37Files.wxs @@ -11,7 +11,7 @@ - + diff --git a/Installers/Windows/Python37.Installer/PythonSdkFiles.wxs b/Installers/Windows/Python37.Installer/PythonSdkFiles.wxs new file mode 100644 index 00000000..dbb6591e --- /dev/null +++ b/Installers/Windows/Python37.Installer/PythonSdkFiles.wxs @@ -0,0 +1,24 @@ + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/Installers/Windows/Python39.Installer/Product.wxs b/Installers/Windows/Python39.Installer/Product.wxs new file mode 100644 index 00000000..7107cd62 --- /dev/null +++ b/Installers/Windows/Python39.Installer/Product.wxs @@ -0,0 +1,55 @@ + + + + + + + + + + + + + + + + + + + + NOT Installed + NOT Installed + REMOVE="ALL" + + + + Updgrading Pip ... + Creating a Python Virtual Environment ... + Removing Python Virtual Environment ... 
+ + + + + + + diff --git a/Installers/Windows/Python39.Installer/Python39.Installer.wixproj b/Installers/Windows/Python39.Installer/Python39.Installer.wixproj new file mode 100644 index 00000000..5161bbef --- /dev/null +++ b/Installers/Windows/Python39.Installer/Python39.Installer.wixproj @@ -0,0 +1,42 @@ + + + + Debug + x86 + 3.10 + 0dbc85bc-52c3-491f-90f9-6d728ffa2e8f + 2.0 + Python39.Installer-1.3.0 + Package + + + bin\$(Configuration)\ + obj\$(Configuration)\ + Debug + -arch x64 + + + bin\$(Configuration)\ + obj\$(Configuration)\ + -arch x64 + True + + + + + + + + + + + + ModelsHarvestPath=..\..\..\installers\downloads\windows\Python39 + + + + + + + + \ No newline at end of file diff --git a/Installers/Windows/Python39.Installer/Python39Files.wxs b/Installers/Windows/Python39.Installer/Python39Files.wxs new file mode 100644 index 00000000..9202bd5d --- /dev/null +++ b/Installers/Windows/Python39.Installer/Python39Files.wxs @@ -0,0 +1,12949 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/Installers/Windows/SenseAI.BootStrapper/Bundle.wxs b/Installers/Windows/SenseAI.BootStrapper/Bundle.wxs index dd2aae31..bb59be55 100644 --- a/Installers/Windows/SenseAI.BootStrapper/Bundle.wxs +++ b/Installers/Windows/SenseAI.BootStrapper/Bundle.wxs @@ -28,26 +28,28 @@ - + - + - - - - - - - + + + + diff --git a/Installers/Windows/SenseAI.BootStrapper/SenseAI.BootStrapper.wixproj b/Installers/Windows/SenseAI.BootStrapper/SenseAI.BootStrapper.wixproj index dc35719c..93c9e29b 100644 --- a/Installers/Windows/SenseAI.BootStrapper/SenseAI.BootStrapper.wixproj +++ b/Installers/Windows/SenseAI.BootStrapper/SenseAI.BootStrapper.wixproj @@ -6,7 +6,7 @@ 3.10 c04bbd0d-fd36-4fa4-805b-106bccd9bc79 2.0 - CodeProject.SenseAI.Server-1.2.1 + CodeProject.SenseAI.Server-1.3.0 Bundle @@ -36,6 +36,14 @@ + + BackgroundRemover.Installer + {7e70c376-7119-4c90-ac30-344eea29594e} + True + True + Binaries;Content;Satellites + INSTALLFOLDER + DeepStack.Installer {ed492608-6013-4552-a29b-a7e14f4beb0b} @@ -52,6 +60,14 @@ Binaries;Content;Satellites INSTALLFOLDER + + PortraitFilter.Installer + {476475ce-3c81-446b-914e-ac472d41b82a} + True + True + Binaries;Content;Satellites + INSTALLFOLDER + Python37.Installer {3f34eaee-8a53-40d1-8cdb-a13ae728fa06} @@ -60,6 +76,14 @@ Binaries;Content;Satellites INSTALLFOLDER + + Python39.Installer + {0dbc85bc-52c3-491f-90f9-6d728ffa2e8f} + True + True + Binaries;Content;Satellites + INSTALLFOLDER + SenseAI.Server.Installer {a1afa75c-324e-4b79-be13-5557e495fbbe} diff --git 
a/Installers/Windows/SenseAI.Server.Installer/DemoImangesFiles.wxs b/Installers/Windows/SenseAI.Server.Installer/DemoImangesFiles.wxs index a7c72dd7..e29ae81f 100644 --- a/Installers/Windows/SenseAI.Server.Installer/DemoImangesFiles.wxs +++ b/Installers/Windows/SenseAI.Server.Installer/DemoImangesFiles.wxs @@ -77,9 +77,6 @@ - - - diff --git a/Installers/Windows/SenseAI.Server.Installer/FrontendInstallFiles.wxs b/Installers/Windows/SenseAI.Server.Installer/FrontendInstallFiles.wxs index cd209a3b..8cfc34b0 100644 --- a/Installers/Windows/SenseAI.Server.Installer/FrontendInstallFiles.wxs +++ b/Installers/Windows/SenseAI.Server.Installer/FrontendInstallFiles.wxs @@ -26,11 +26,17 @@ - - + + - - + + + + + + + + @@ -86,14 +92,14 @@ - - + + - - + + diff --git a/Installers/Windows/SenseAI.Server.Installer/Product.wxs b/Installers/Windows/SenseAI.Server.Installer/Product.wxs index 83406dc8..63111e46 100644 --- a/Installers/Windows/SenseAI.Server.Installer/Product.wxs +++ b/Installers/Windows/SenseAI.Server.Installer/Product.wxs @@ -27,7 +27,8 @@ Guid="22ba67a0-de8d-45dc-abb1-ac07a0919106" Directory="CODEPROJECTSENSEAISTARTMENUFOLDER"> - + 3.10 a1afa75c-324e-4b79-be13-5557e495fbbe 2.0 - CodeProject.SenseAI.WebAPI-1.2.1 + CodeProject.SenseAI.WebAPI-1.3.0 Package SenseAI.Server.Installer diff --git a/Installers/Windows/SharedDirectories.wxi b/Installers/Windows/SharedDirectories.wxi index de553811..78779d2c 100644 --- a/Installers/Windows/SharedDirectories.wxi +++ b/Installers/Windows/SharedDirectories.wxi @@ -9,10 +9,16 @@ + + + + + + @@ -22,6 +28,8 @@ + + diff --git a/Installers/Windows/SharedProperties.wxi b/Installers/Windows/SharedProperties.wxi index 139c2b06..364f8bcc 100644 --- a/Installers/Windows/SharedProperties.wxi +++ b/Installers/Windows/SharedProperties.wxi @@ -1,5 +1,5 @@ - - + + diff --git a/Installers/Windows/TextSummary.Installer/TextSummary.Installer.wixproj b/Installers/Windows/TextSummary.Installer/TextSummary.Installer.wixproj index e01f80bd..09f91330 100644 --- a/Installers/Windows/TextSummary.Installer/TextSummary.Installer.wixproj +++ b/Installers/Windows/TextSummary.Installer/TextSummary.Installer.wixproj @@ -6,7 +6,7 @@ 3.10 f675bea0-4a75-4b30-9e70-cbbe8641e9cd 2.0 - CodeProject.SenseAI.TextSummary-1.2.1 + CodeProject.SenseAI.TextSummary-1.3.0 Package diff --git a/Installers/Windows/TextSummary.Installer/TextSummaryFiles.wxs b/Installers/Windows/TextSummary.Installer/TextSummaryFiles.wxs index d4e7cbd5..2b764217 100644 --- a/Installers/Windows/TextSummary.Installer/TextSummaryFiles.wxs +++ b/Installers/Windows/TextSummary.Installer/TextSummaryFiles.wxs @@ -8,12 +8,15 @@ + + + + + + - - - @@ -107,6 +110,12 @@ + + + + + + diff --git a/Installers/Windows/YoloNet.Installer/YoloNet.Installer.wixproj b/Installers/Windows/YoloNet.Installer/YoloNet.Installer.wixproj index 976e4989..84baa425 100644 --- a/Installers/Windows/YoloNet.Installer/YoloNet.Installer.wixproj +++ b/Installers/Windows/YoloNet.Installer/YoloNet.Installer.wixproj @@ -6,7 +6,7 @@ 3.10 e0aede8a-e336-49c6-9d6a-7a05e8155ab9 2.0 - CodeProject.SenseAI.YoloNet-1.2.1 + CodeProject.SenseAI.YoloNet-1.3.0 Package @@ -25,16 +25,6 @@ - - - CodeProject.SenseAI.AnalysisLayer.Yolo - {232710a8-9180-4139-8ff2-8f21f649d927} - True - True - Binaries;Content;Satellites - INSTALLFOLDER - - $(WixExtDir)\WixUtilExtension.dll @@ -45,6 +35,16 @@ WixUIExtension + + + ObjectDetector + {232710a8-9180-4139-8ff2-8f21f649d927} + True + True + Binaries;Content;Satellites + INSTALLFOLDER + + @@ -55,7 +55,7 @@ Other similar extension points exist, 
see Wix.targets. --> - + diff --git a/Installers/Windows/YoloNet.Installer/YoloNetInstallFiles.wxs b/Installers/Windows/YoloNet.Installer/YoloNetInstallFiles.wxs index 3d59d015..ee06234e 100644 --- a/Installers/Windows/YoloNet.Installer/YoloNetInstallFiles.wxs +++ b/Installers/Windows/YoloNet.Installer/YoloNetInstallFiles.wxs @@ -9,220 +9,229 @@ - + - + + + + + + + - + - + - + - - - - - - - - - - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + + + + + + + + + + + + + + + + + + + - + - + - + - + - - - - + - - - - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + diff --git a/THIRD-PARTY-NOTICES.md b/THIRD-PARTY-NOTICES.md index cad2bff8..af0a6a8d 100644 --- a/THIRD-PARTY-NOTICES.md +++ b/THIRD-PARTY-NOTICES.md @@ -3,31 +3,6 @@ This project incorporates components from the projects listed below -### ApexCharts -##### A modern JavaScript charting library -https://github.com/apexcharts/apexcharts.js - -The MIT License (MIT) - -Copyright (c) 2018 ApexCharts - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. ### Bootstrap ##### Sleek, intuitive, and powerful front-end framework for faster and easier web development @@ -239,6 +214,63 @@ https://github.com/johnolafenwa/DeepStack END OF TERMS AND CONDITIONS +### Portrait-Mode +##### High quality implementation of the portrait mode effect using Neural Networks. +Originally: https://github.com/mayank26saxena/portrait-mode + +C# Port: https://github.com/asiryan/Portrait-mode + +MIT License + +Copyright (c) 2018 Mayank Saxena + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
+
+
+### Rembg
+##### Rembg is a tool to remove images background.
+https://github.com/danielgatis/rembg
+
+MIT License
+
+Copyright (c) 2020 Daniel Gatis
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
+
+
+### Swashbuckle.AspNetCore
+##### Generate beautiful API documentation
+https://github.com/domaindrivendev/Swashbuckle.AspNetCore
diff --git a/docs/Articles/How-to-add-AI-to-an-app.md b/docs/Articles/How-to-add-AI-to-an-app.md
new file mode 100644
index 00000000..a321cd5c
--- /dev/null
+++ b/docs/Articles/How-to-add-AI-to-an-app.md
@@ -0,0 +1,244 @@
+# How to add Artificial Intelligence to an existing Application
+
+## Introduction
+
+If you haven't already been asked to add Artificial Intelligence (AI) capabilities to an app then
+it's probably only a matter of time before the topic is raised.
+
+Adding AI capabilities isn't hard. However, that's like saying adding database support to an app
+isn't hard. It's not, but choosing the correct database, setting up the schema and stored
+procedures can be hard work. Then you need to decide whether the database should be on the same
+server or a different server. You also need to decide which database you'll use: relational,
+document-based, Key/Value... It can get complicated.
+
+AI is just like that. Add a library, use a local service, or use a hosted service? Which service
+do I use? How do I set it up? And then the tricky questions: how much will it cost? How will
+my data be handled? How secure is it?
+
+So let's do a quick walkthrough of your options so you at least know the questions to ask.
+
+## Writing it yourself
+
+I'll start by saying this is how we started our foray into AI a decade ago and I really
+wouldn't recommend it. There are so, so many brilliant researchers who have spent a zillion
+man-hours building incredibly powerful and efficient AI libraries and models based on a
+fast-evolving corpus of research into AI that it's simply easier, faster and safer to use
+one of the many AI solutions already available.
+
+Having said that, diving into something like a simple neural network to build up the ability
+to classify data based on your specific scenarios can be fulfilling, provide great results,
+and will result in little overhead. CodeProject's SPAM filter is just such a beast and anything
+bigger would, in our view, be overkill. The right tool for the job in this case.
+
+**Pros**:
+- It's fun writing code.
+- You get exactly what you need and nothing more
+- You may end up with a far smaller codebase since you're not importing libraries and all their
+  dependencies
+
+**Cons**
+- You're reinventing the wheel
+- You'll (probably) not do it as well, as accurately, or have a solution as fast as what is
+  already out there.
+- It may be a distraction from your core business case. It could end up costing you more in
+  time, missed opportunities and developer time than simply using an existing solution.
+
+## Using an AI library or toolkit directly in your code
+
+If you wish to include AI processing directly in your code base then you can't go wrong using
+libraries such as TensorFlow or PyTorch. There are lots of mature, supported, easy-to-use
+libraries available for multiple languages and platforms. They take care of the hard work for you,
+and together with the many pre-trained models out there, all you need to do is include the toolkit,
+load the model, input your data, run the inference and output the results.
+
+Here's how to use the latest YOLOv5 model in Python:
+
+```python
+import torch                                              # import the Torch library
+
+model = torch.hub.load('ultralytics/yolov5', 'yolov5s')   # Load the YOLO model
+img = '~/username/images/street.jpg'                      # Specify the image
+results = model(img)                                      # Perform the inference
+results.print()                                           # Output the results
+```
+
+How easy is that!
+
+Issues start to arise when you need to cater for different model versions, and the versions
+of the libraries that the models were trained for. And the versions of the compiler or
+interpreter needed for the libraries that are needed for the models. And then what happens if
+all of this conflicts with the libraries, interpreter versions and even hardware requirements
+in other parts of your app?
+
+It can be a real challenge, especially for, say, Python, where you may need to set up multiple
+virtual environments, each with their own copies of the library packages, and each using a
+different version of Python. Keeping these in sync and uncorrupted can take a lot of patience.
+
+You may have a wonderful solution for, say, Python 3.7, but when it's run on another machine
+that has Python 3.11 installed, it may simply fail.
+
+Adding AI directly into your application can mean you will need to be extremely careful to ensure
+you always deploy all the parts needed as one unit. Docker will save you here, but to many that
+kind of overhead may not be acceptable.
+
+Finally, in adding an AI toolkit to your app you need to remember that you'll also be adding the
+model itself. AI models can be big: gigabytes big.
+
+**Pros**
+
+- You build on the work of brilliant developers and researchers before you
+- Many of the libraries are Open Source so you can view and audit the code
+- AI libraries and tools are being developed and refined at breakneck speed. There is a constant
+  stream of new features and improved performance being released
+- The libraries are generally very easy to use
+- There are tools to allow conversion of models between libraries.
+- There's a model for almost any language and platform
+
+**Cons**
+
+- There's definitely a learning curve to using a library
+- You may be restricted to using a particular model format for the given library
+- There are so, so many libraries. The paradox of choice.
+- Including a library rarely means just one library: it usually brings along all its friends and
+  relatives and distant cousins. Things can get bloated
+- Including a library means including the models. Things can get really, really big, fast.
+- You have to ensure you keep your compiler/interpreter, libraries and models in sync with regard
+  to versioning. Never assume the default installation of Python, for instance, will work with
+  your code.
+
+
+### Using an abstracting library (ML.NET, OpenVINO)
+
+Many libraries require you to use a specific form of pre-trained model, or they require a different
+library for different hardware. This issue is solved by libraries such as ML.NET and OpenVINO that
+work to aggregate and abstract libraries and hardware in order to provide a single API for your
+AI operations.
+
+**Pros**
+
+- All the pros of using a dedicated library
+- No need to have a specific version for specific hardware. The libraries will adapt dynamically
+- You're able to easily consume a wider range of models
+- You're somewhat future-proofed against new hardware and model formats
+
+**Cons**
+
+- An aggregation of libraries and capabilities will result in a larger footprint.
+- Abstraction may result in the "least common denominator" issue whereby a library only exposes
+  common functionality, meaning you lose access to some features or fine-tuning available in a
+  dedicated library
+- Your choice of language or platform may be limited.
+
+
+## Hosted AI Service
+
+Using a hosted AI service means you do away with all the issues involved with libraries and hardware
+and compatible toolkits and dragging around GB of models. You make a call to a hosted AI service
+and the result comes back milliseconds later over your low-latency, high-bandwidth internet
+connection. Assuming you have one of those, of course.
+
+The range of services offered by hosted providers is truly amazing. Pre-built models, fast hardware,
+great APIs. Just be aware of the cost.
+
+When thinking about the cost you need to understand the charges. Will it cost to upload data to
+the provider? What about downloading results? What's the cost per request and how is it calculated?
+Some services will charge per request, some per processing unit, some per time. You also need to
+factor in the cost of data storage and any licensing costs that may be applicable. Note also that
+the cost will be affected to a high degree by the task: passing in data that is applied to a
+pre-trained model is one thing, but passing in terabytes of data for training new models is orders
+of magnitude more expensive. GPT-3, for instance, is rumoured to have cost around $5 million to
+train.
+
+There are options to reduce your cost. One method is to mix and match service providers: upload
+and store your data with a provider such as Dell that has cheap storage. Send this data to Azure,
+which may not have storage ingestion charges, train the model, and send the results back to your
+Dell storage. Your data is safe and stored relatively cheaply on one provider, while another
+provider has done the heavy lifting of training your model. Sending data between large hosting
+providers is often extremely fast due to the massive pipes they sit on.
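+
+Whichever provider you choose, the mechanics of making a call are usually the easy part. As a rough
+illustration (the endpoint, request fields and response shape below are hypothetical rather than any
+particular vendor's API), an inference request is typically just an HTTP POST with your data and an
+API key:
+
+```python
+import requests  # pip install requests
+
+# Hypothetical hosted vision endpoint and key - substitute your provider's real values.
+API_URL = "https://api.example-ai-provider.com/v1/vision/detect"
+API_KEY = "your-api-key"
+
+with open("street.jpg", "rb") as image_file:
+    response = requests.post(
+        API_URL,
+        headers={"Authorization": f"Bearer {API_KEY}"},
+        files={"image": image_file},
+        timeout=30,
+    )
+
+response.raise_for_status()
+for prediction in response.json().get("predictions", []):
+    print(prediction)  # e.g. a label, a confidence and a bounding box
+```
+
+The hard part is everything around that call: cost, quotas, and where your data ends up.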
+
+If you are simply using the hosting provider for AI inferencing (i.e. sending data to an AI model to
+have a prediction or analysis made by the model) then you should also be aware of constraints such as
+limits to the absolute number of calls, as well as any throttling that would limit the number of
+calls in a given period of time. How will your users react if a piece of functionality disappears
+because other users have exhausted your quota for the day?
+
+You also need to understand where the data goes and how the laws in that jurisdiction may affect
+you. Will a copy of your data be stored in a foreign jurisdiction? Will your data feed be monitored
+or made available to third parties? A webcam feed from inside a person's home may not be something
+a user or your app wants to know is being sent to a foreign country for processing. There may even
+be legal or insurance restrictions you need to be aware of if sending personally identifiable
+data outside of your country.
+
+**Pros**
+
+- Fast, powerful, and you get access to the latest and greatest
+- No need to worry about AI libraries or versions
+- No need to worry about hardware speed or capacity. Your credit card is your only limiter.
+- You're able to easily use a wide range of models
+- You're future-proofed. You'll have access to the latest and greatest
+- Your apps will be smaller. No carrying around library code or models
+- They will work with any language that can make an API call over HTTP
+
+**Cons**
+
+- You will probably need a decent internet connection to make use of these services
+- They can be expensive or, more often than not, ambiguous or opaque about what they will actually cost
+- The system is closed. You can't really see what's happening behind the curtains
+- You don't control where your data goes. This can be an issue for privacy and security.
+- You may face quota or usage issues
+
+
+## Local AI Service
+
+So what if you don't want to write your own code for AI, you want to use any language or platform
+you choose for your AI analysis, you don't want your data to leave your local network (or even
+your machine), and you don't want to pay an unknown amount for a service that you know is available
+for free?
+
+A locally hosted AI service such as CodeProject SenseAI gives you the best of both worlds. You do
+not need to worry about dealing with libraries and versions, and any language that can make an
+HTTP call can interact with the service.
+
+**Pros**
+
+- Local Open Source AI servers can be found online and used for free
+- There are no usage limits
+- Your data stays where you can see it. Nothing gets sent outside the network unless you choose.
+- Like hosted AI services, you don't need to worry about libraries or versioning. It's all within
+  the bounds of the service
+- You get the benefit of accessing multiple AI features easily without needing to install a library
+  or toolkit for each AI operation you want to perform
+
+**Cons**
+
+- The installer for the service may be large depending on what models and features are included.
+- Like using a library directly, you are limited by the power of the machine the service is
+  installed on
+- Again, like using a library directly, you won't get updates automatically.
+- The AI features offered will not be as extensive as those of a hosted service.
+
+## Next Steps
+
+Adding AI to an application can fundamentally expand its capabilities while reducing complexity.
+Heuristics and hard-coded if-then statements get replaced by training sets based on (hopefully)
+a wide range of real world data that traditional binary logic can't easily encompass.
+
+The manner in which you add AI has equally fundamental consequences, and the choice of how you
+do this depends on your requirements, your budget and ultimately your circumstances.
+
+At CodeProject we have dabbled with all of the methods outlined above in adding AI to our
+systems. Our experience has ranged from being pleasantly surprised at how easy some methods
+are, to outright enraged at having to fight the tools every step of the way.
+
+In the end we wanted to share our experience (and the fun) of working with AI with as many
+developers as possible without asking them to go through the frustration. Hunting down compatible
+libraries, dealing with models, system tools, paths, oddities between operating systems and
+oddities between the same operating system on different CPUs was and still is incredibly
+time-consuming. We built SenseAI Server as a means to wrap up this complexity into a self-contained
+package that does all the grunt work for you. Once installed, you are immediately at the point
+where you can start playing with, and using, AI in your apps.
+
+Download the installer [link] and give it a try.
+
diff --git a/docs/Articles/ModuleStatus.png b/docs/Articles/ModuleStatus.png new file mode 100644 index 00000000..b1f6047c Binary files /dev/null and b/docs/Articles/ModuleStatus.png differ
diff --git a/docs/Articles/adding-a-module.md b/docs/Articles/adding-a-module.md new file mode 100644 index 00000000..58e8c8d5 --- /dev/null +++ b/docs/Articles/adding-a-module.md @@ -0,0 +1,493 @@
+
+## Introduction
+
+Adding AI capabilities to an app is reasonably straightforward if you're happy
+to follow the twisty turny maze that is the endless list of libraries, tools,
+interpreters, package managers and all the other fun stuff that sometimes makes coding
+about as fun as doing the dishes.
+
+We built SenseAI to hide all the annoying things from developers and leave them
+with a simple AI package that Does Stuff and is easy to use with an existing application.
+
+If SenseAI doesn't do what you need then it's easy to add modules that will fill
+in the gap.
+
+![Example photo](pexels-thirdman-7268587.jpg)![The same photo with its background removed](dog_and_man_rembg.png)
+
+## The SenseAI architecture in under 30 seconds
+
+SenseAI is an HTTP-based REST API server. It's basically just a webserver to which
+your application sends requests. Those requests are placed on a queue, and the
+analysis services (aka The Modules) pick requests off the queues they know how to
+service. Each request is then processed (an AI operation is performed based on the
+request) and the results are sent back to the API server, which in turn sends them back
+to the application that made the initial call.
+
+      request   |--------------------------------------|        |---------------------------------|
+    |-----| ---> | [API Server] ---> Queue1 (eg images) |  Json  | Module 1 (eg object detection)  |
+    | App |      |              ---> Queue2 (eg sounds) |  <-->  | Module 2 (eg voice recognition) |
+    |-----| <--- |              ---> Queue3 (eg text)   |        | Module 3 (eg song recognition)  |
+     response    |--------------------------------------|        |---------------------------------|
+
+
+1. An application sends a request to the API server
+2. The API server places the request on the appropriate queue
+3. The backend modules poll the queue they are interested in, grab a request and process it
+4. The backend module then sends the result back to the API server
+5. The API Server then sends the result back to the calling application
+
+The Sense API Server runs independently of the calling application.
+
+> Think of SenseAI like a database server or any other service you have running in the background:
+> it runs as a service or daemon, you send it commands and it responds with results. You don't
+> sweat the details of how it goes about its business, you just focus on your application's core
+> business.
+
+## Adding a new module to SenseAI
+
+There are two tasks in adding a new module:
+
+1. Ensuring any prerequisites such as models, libraries or interpreters are installed correctly
+2. Writing the actual module and wiring it up to SenseAI
+
+### Adding prerequisites
+
+Prerequisites such as models, interpreters, compilers and tools will need to be added to the dev
+setup script in order to allow everyone working on the code to have a dev environment that works
+for that module. If you choose to include your module in a Windows installer or Docker image, then
+you will need to update both of those environments as well.
+
+#### Required (for development)
+
+1. **The Dev Environment setup script**. In /Installers/Dev are the setup scripts that should be
+   run in order to set up the dev environment for build, debug and testing. If you need to download
+   and/or install models, programs or tools, ensure you add the commands to these scripts.
+
+   There are two scripts, one for Windows and one for Linux/macOS. The scripts are broken into
+   sections, including general setup, reusable functions, and blocks of script relevant to each
+   given module.
+
+#### Optional (for the end users)
+
+1. **The Windows installer**. If you wish to have your new module be installed on Windows with the
+   existing modules you will need to create a sub-installer for any assets that need to be put
+   in place. This is non-trivial and only required if you wish to distribute your work as part of
+   the main installer.
+
+   In the future, a module's assets will simply be zipped together with a manifest file which the
+   installer will inspect and install in the correct locations automatically.
+
+2. **The Docker Image**. If you wish to have your new module be installed in the Docker image you
+   will need to update the main Docker image to ensure it includes the assets and tools you need.
+
+Both of these are outside the scope of this article.
+
+### Writing a module
+
+Writing a module is the fun part. In fact you often don't have to write a new module: there are
+hundreds of excellent Open Source, self-contained AI projects that would make excellent modules.
+All you need to do is ensure the module can run in the installed environment (taken care of in
+the pre-requisites step), that any models it needs are downloaded and in the right place (again,
+should already be done), and that the module can communicate with the SenseAI server.
+
+We will be providing a simple SDK for many languages that will help you write a shim that
+will fit between the module and SenseAI and take care of communication.
+
+## Let's add a module
+
+We're going to add the [rembg](https://github.com/danielgatis/rembg) module. This
+is a simple but fun AI module that takes any photo containing a subject and removes
+the background from the image. It runs under Python 3.9 or above.
+
+### Setup (the installer)
+
+The rembg module comprises the following:
+
+1. The Python code
+1. The Python 3.9 interpreter
+1. Some Python packages
+1. The AI models
+
+To ensure these are all in place within the development environment we need to modify the setup
+scripts in /Installers/Dev.
+
+#### For Windows (setup_dev_env_win.bat)
+
+``` batch
+:: Background Remover :::::::::::::::::::::::::::::::::::::::::::::::::::::::::
+
+:: The name of the dir containing the Background Remover module
+set moduleDir=BackgroundRemover
+
+:: The full path of the background remover module
+set modulePath=%analysisLayerPath%\%moduleDir%
+
+:: The name of the dir containing the background remover models
+set moduleAssetsDir=models
+
+:: The name of the file in our S3 bucket containing the assets required for this module
+set modelsAssetFilename=rembg-models.zip
+
+:: Install python and the required dependencies
+call :SetupPython 3.9
+call :InstallPythonPackages 3.9 "%modulePath%\requirements.txt" "onnxruntime"
+
+:: Download the AI models
+call :Download "%storageUrl%" "%downloadPath%\" "%modelsAssetFilename%" "%moduleDir%" ^
+     "Downloading Background Remover models..."
+
+:: Copy over the models to the working directory for this module.
+if exist "%downloadPath%\%moduleDir%" (
+    robocopy /e "%downloadPath%\%moduleDir% " "%modulePath%\%moduleAssetsDir% "
+)
+```
+
+The code should be self-explanatory:
+
+1. Set up the names of the directories and subdirectories that will hold the module, as well as the
+   name of the asset to be downloaded.
+
+   **analysisLayerPath** is the full path to the Analysis folder that contains all modules. In our
+   case this is currently /src/AnalysisLayer.
+
+2. Install Python 3.9 and then install the Python packages as listed in the requirements.txt file.
+
+   **:SetupPython** is a subroutine that installs the given version of Python. We only support 3.7
+   and 3.9 at present, but are adding more as needed. All that's really needed to add another version
+   is to provide a self-contained download of a 64-bit Python interpreter.
+
+   **:InstallPythonPackages** walks through the supplied requirements file and installs the packages
+   using the given Python version. There is a second parameter (here, 'onnxruntime') that allows
+   you to provide the name of a package which, if detected, can be assumed to mean that all packages
+   have been installed and that this step can be skipped. This provides an optimisation in case
+   you need to rerun the installation.
+
+3. Download the models
+
+   **:Download** *source_base_url* *download_storage_path* *archive_filename* *extract_dir* *message*
+   will download *archive_filename* from the *source_base_url*, store it in *download_storage_path*
+   and extract it into a directory called *extract_dir*.
It will also output *message* when and if + the download starts. *archive_filename* must be either a Zip or GZip file. + + **source_base_url** contains the URL to our S3 bucket that holds all our downloads.
+ **download_storage_path** will be /Installers/Dev/downloads.
+ **archive_filename** will, in this case, be 'rembg-models.zip'
+   **extract_dir** is 'BackgroundRemover'
+
+   So we're downloading the models in *rembg-models.zip* from our S3 bucket and storing it in
+   */Installers/Dev/downloads/BackgroundRemover*.
+
+   The reason we store the zips in the /downloads dir instead of extracting directly to the module's
+   directory is so we can easily re-run the setup script without needing to re-download every asset
+   of every module.
+
+
+4. Copy over the downloaded models to the module's working directory. Nothing too fancy here. The
+   models will be copied to *%modulePath%\%moduleAssetsDir%* which expands to
+   */src/AnalysisLayer/BackgroundRemover/models*.
+
+#### For Linux
+
+The script is essentially the same as the Windows version:
+
+``` bash
+# Background Remover :::::::::::::::::::::::::::::::::::::::::::::::::::::::::
+
+# The name of the dir containing the background remover module
+moduleDir='BackgroundRemover'
+
+# The full path of the background remover module
+modulePath="${analysisLayerPath}/${moduleDir}"
+
+# The name of the dir containing the background remover models
+moduleAssetsDir='models'
+
+# The name of the file in our S3 bucket containing the assets required for this module
+modelsAssetFilename='rembg-models.zip'
+
+setupPython 3.9
+installPythonPackages 3.9 "${modulePath}/requirements.txt" "onnxruntime"
+
+Download $storageUrl "${downloadPath}" $modelsAssetFilename "${moduleDir}" "Downloading models..."
+if [ -d "${downloadPath}/${moduleDir}" ]; then
+    mv -f "${downloadPath}/${moduleDir}" "${modulePath}/${moduleAssetsDir}"
+fi
+```
+
+Note that in our scripts we've included some small additions to allow you to force a re-download
+if needed, but these are just details and so we've not included them here in order to keep things
+simple.
+
+### The Module's API
+
+We should start with how we'll call the module. It can be whatever we like, so let's choose the
+route /v1/image/removebackground. We'll pass in an image and a boolean "use_alphamatting"
+which tells the code whether or not to use alpha matting (better for fuzzy edges).
+
+The return package will include a single item "imageBase64", which contains the base64-encoded
+version of the image with the background removed.
+
+### The Module's Source Code
+
+First, we create a folder under the modules directory and copy over the
+code for the module. In this case we'll store the code in /src/AnalysisLayer/BackgroundRemover.
+For convenience we'll create a Python project for those using Visual Studio (working in VS Code is
+just as simple).
+
+The rembg module has one main method we need to call, named remove. We need to be able
+to get the data from a client's request to this method, and then pass the results of this method
+back to the client. For this, we'll use the AnalysisLayer/SDK/senseAI.py module to
+help.
+
+We'll also create an adapter (which we'll call sense_rembg_adapter.py) for rembg that connects the
+rembg remove method with our senseAI.py helper module.
+
+```python
+# Import the SenseAI helper
+import sys
+sys.path.append("../SDK/Python")
+from senseAI import SenseAIBackend, LogMethod
+
+# Import the rembg method we need to call
+from rembg.bg import remove
+
+# Import the packages that we need to pass around the data
+import base64
+from io import BytesIO
+import json
+import traceback
+
+# Create a SenseAI helper
+senseAI = SenseAIBackend()
+
+# The main method containing the loop that will process the queue
+def remove_background(thread_name):
+
+    QUEUE_NAME = "removebackground_queue"
+
+    # Keep checking the queue
+    while True:
+
+        # Get a request from the API server queue.
+        queue_entries: [] = senseAI.getCommand(QUEUE_NAME)
+
+        if len(queue_entries) > 0:
+            for queue_entry in queue_entries:
+
+                # Get the data from the request
+                req_data: dict = json.JSONDecoder().decode(queue_entry)
+
+                output: any = {}
+                try:
+                    # Get the request ID so we can include it in the data we return to the server
+                    req_id: str = req_data.get("reqid", "")
+
+                    # Use the senseAI helper methods to get the image data and alpha matting param
+                    img = senseAI.getImageFromRequest(req_data, 0)
+                    use_alphamatting: bool = bool(senseAI.getRequestValue(req_data, "use_alphamatting", "false"))
+
+                    # Perform the operation
+                    processed = remove(img, use_alphamatting)
+
+                    # Convert the result of the operation into data that can be passed back as Json
+                    buffered = BytesIO()
+                    processed.save(buffered, format="PNG")
+                    img_dataB64_bytes = base64.b64encode(buffered.getvalue())
+                    img_dataB64 = img_dataB64_bytes.decode("ascii")
+
+                    # The return object. This can be whatever you like, as long as the client
+                    # knows what to expect
+                    output = {"success": True, "imageBase64": img_dataB64}
+
+                except Exception:
+                    err_trace = traceback.format_exc()
+
+                    output = {
+                        "success": False,
+                        "error": "unable to process the image",
+                        "code": 500
+                    }
+
+                    # Use the senseAI helper to report the error
+                    senseAI.log(LogMethod.Error | LogMethod.Cloud | LogMethod.Server,
+                                { "process": "removebackground",
+                                  "file": "sense_rembg_adapter.py",
+                                  "method": "remove_background",
+                                  "message": err_trace,
+                                  "exception_type": "Exception"})
+
+                finally:
+                    try:
+                        # Send the response back to the server, which will in turn send
+                        # it back to the client
+                        senseAI.sendResponse(req_id, json.dumps(output))
+                    except Exception:
+                        print("An exception occurred")
+
+
+if __name__ == "__main__":
+    senseAI.log(LogMethod.Info | LogMethod.Server, {"message":"RemoveBackground module started."})
+    remove_background("main_removebackground")
+```
+
+This is the only code we've added. The rembg module has been copied and pasted as-is, and we're
+reusing the senseAI.py helper class. Nothing else (code-wise) needs to be added.
+
+### The modulesettings.json file
+
+This file, in the BackgroundRemover folder, instructs the API server how to
+launch our new analysis service.
+
+```json
+{
+  "Modules": {
+
+    "EnvironmentVariables": {
+      "U2NET_HOME": "%MODULES_PATH%/BackgroundRemover/models" // where to store the models
+    },
+
+    // The processes (typically the backend analysis processes) that are to be started when the
+    // server starts. They will be started in order of appearance.
+    "ModulesConfig": {
+
+      "BackgroundRemoval": {
+        "Name": "Background Removal",
+        "Activate": true,
+        "Description": "Removes backgrounds from images.",
+        "FilePath": "BackgroundRemover\\sense_rembg_adapter.py",
+        "Platforms": [ "windows", "linux", "macos", "docker" ],
+        "Runtime": "python39",
+
+        "RouteMaps": [
+          // ... (explained below)
+        ]
+      }
+    }
+  }
+}
+```
+
+The **EnvironmentVariables** section defines key/value pairs that will be used to set environment
+variables that may be required by the module. In this case, the path to the AI model files. This
+is a value specific to, and defined by, the rembg module.
+
+MODULES_PATH is a macro that will expand to the location of the directory containing the modules.
+In this case /src/AnalysisLayer.
+
+The **ModulesConfig** section contains the information needed to launch the module.
+The name and description are self-explanatory. The FilePath is the path to the file to be
+executed, relative to the MODULES_PATH directory. Activate sets whether or not this module will
+be launched at runtime.
+
+Runtime defines which runtime will launch the file. We currently support dotnet (.NET), python37
+(Python 3.7) and python39 (Python 3.9). If omitted, the SenseAI Server will attempt to
+guess based on the FilePath.
+
+The Platforms array contains an entry for each platform on which the service can run.
+Currently Windows, Linux, macOS and Docker are supported.
+
+The file also defines the API routes for the module under the **RouteMaps** section:
+
+```json
+{
+  "Modules": {
+    "ModulesConfig": {
+      "BackgroundRemoval": {
+        "Name": "Background Removal",
+        "Description": "Removes backgrounds from images.",
+
+        ...
+
+        "RouteMaps": [
+          {
+            "Path": "image/removebackground",
+            "Command": "removebackground",
+            "Queue": "removebackground_queue",
+            "Description": "Removes the background from images.",
+            "Inputs": [ ... ],
+            "Outputs": [...]
+          }
+        ]
+      }
+    }
+  }
+}
+```
+
+Path is the API path, in this case _localhost:5000/v1/image/removebackground_. Remember that this
+was what we chose (arbitrarily) as our API. It can be anything as long as it isn't currently in use.
+
+Command is the method in the API controller that will be called, in this case
+removebackground. Queue is the name of the queue in the API server that will manage
+the requests for this service.
+
+In the adapter Python module we wrote, we had the code
+```python
+    QUEUE_NAME = "removebackground_queue"
+```
+Queue, in the route map, should match this name.
+
+Description, Inputs and Outputs are purely documentation at this stage.
+
+### The client that will call our new module
+
+A simple JavaScript test harness can be used to demonstrate the new module.
+
+```javascript
+// Assume we have an HTML INPUT type=file control with ID=fileChooser
+var formData = new FormData();
+formData.append('image', fileChooser.files[0]);
+formData.append("use_alphamatting", 'false');
+
+var url = 'http://localhost:5000/v1/image/removebackground';
+
+fetch(url, { method: "POST", body: formData})
+    .then(response => {
+        if (response.ok) {
+            response.json().then(data => {
+                // img is an IMG tag that will display the result
+                img.src = "data:image/png;base64," + data.imageBase64;
+            })
+        }
+    })
+```
+
+The project contains a file `test.html` that implements this, providing the UI to collect the
+information and display the results.
+
+## Install and test
+
+At this point we have a module, an install script and a test client. Let's give
+it a run:
+
+0. Ensure you have the latest [SenseAI repo](https://github.com/codeproject/CodeProject.SenseAI)
+   downloaded. That has all the code we've talked about above already in place.
+
+1. Run the dev install script. This will ensure Python 3.9 is installed and
+   set up, and that the required Python modules are installed.
+
+2. Launch the server by starting a new debug session in Visual Studio or VS Code.
+
+3. In Debug, the SenseAI Dashboard is automatically launched when run. After the server starts all
+   the backend Modules, including the Background Removal module, the Dashboard will display the
+   modules it knows about.
+![Module Status Panel](ModuleStatus.png)
+
+4. Launch the `test.html` file in a browser, choose a file and click the "Submit" button. The
+   results should be shown.
+![Background Remover test.html page](test.html.jpg)
+
+## What next?
+
+That's up to you. We've demonstrated a very simple AI module that removes the background from an
+image. The main work was:
+
+1. Ensuring you have the assets (eg models) available on a server so they can be downloaded
+2. Updating the install script so your assets can be downloaded and moved into place, as well as
+   ensuring you have the necessary runtime and libraries installed
+3. Dropping in your module's code and writing an adapter so it can talk to the SenseAI Server
+4. Writing a modulesettings file that describes the API for your module
+5. Testing! Always the fun part.
+
+The possibilities for what you can add are almost limitless. Our goal is to enable you, as a
+developer, to add your own AI modules easily, and in turn get the benefit of modules that others
+have added. Mix and match, play with different sets of trained modules, experiment with settings
+and see where you can take this.
+
+It's about learning and it's about having some fun. Go for it.
\ No newline at end of file
diff --git a/docs/Articles/dog_and_man_rembg.png b/docs/Articles/dog_and_man_rembg.png new file mode 100644 index 00000000..529a77ec Binary files /dev/null and b/docs/Articles/dog_and_man_rembg.png differ
diff --git a/demos/TestData/pexels-thirdman-7268587 (1).jpg b/docs/Articles/pexels-thirdman-7268587.jpg similarity index 100% rename from demos/TestData/pexels-thirdman-7268587 (1).jpg rename to docs/Articles/pexels-thirdman-7268587.jpg
diff --git a/docs/Articles/test.html.jpg b/docs/Articles/test.html.jpg new file mode 100644 index 00000000..f6b05f7b Binary files /dev/null and b/docs/Articles/test.html.jpg differ
diff --git a/docs/Articles/test.html.png b/docs/Articles/test.html.png new file mode 100644 index 00000000..50a32c8a Binary files /dev/null and b/docs/Articles/test.html.png differ
diff --git a/src/API/Common/Responses.cs b/src/API/Common/Responses.cs index 52a92de7..18ab3e36 100644 --- a/src/API/Common/Responses.cs +++ b/src/API/Common/Responses.cs
@@ -1,4 +1,5 @@
-using System.Collections.Generic;
+using System;
+using System.Collections.Generic;
 using System.Text.Json.Serialization;
 namespace CodeProject.SenseAI.API.Common
@@ -15,7 +16,6 @@ public class ResponseBase
 ///
 [JsonPropertyOrder(-5)]
 public bool success { get; set; } = false;
-
 }
 public class SuccessResponse : ResponseBase
@@ -70,12 +70,49 @@ public class LogListResponse : SuccessResponse
 public LogEntry[]? entries { get; set; }
 }
+
+    ///
+    /// Represents the status of a process
+    ///
+    public class ProcessStatus
+    {
+        ///
+        /// Gets or sets the module Id
+        ///
+        public string? ModuleId { get; set; }
+
+        ///
+        /// Gets or sets the module name
+        ///
+        public string? Name { get; set; }
+
+        ///
+        /// Gets or sets the UTC time the module was started
+        ///
+        public DateTime? Started { get; set; }
+
+        ///
+        /// Gets or sets the UTC time the module was last seen making a request to the backend queue
+        ///
+        public DateTime?
LastSeen { get; set; } + + /// + /// Gets or sets a value indicating whether or not the module is running + /// + public bool Running { get; set; } + + /// + /// Gets or sets the number of requests processed + /// + public int Processed { get; set; } + } + /// /// The Response when requesting the status of the backend analysis services /// public class AnalysisServicesStatusResponse : SuccessResponse { - public KeyValuePair[]? statuses { get; set; } + public List? statuses { get; set; } } #pragma warning restore IDE1006 // Naming Styles diff --git a/src/API/Server/Backend/Backend.csproj b/src/API/Server/Backend/Backend.csproj index 1454815f..2501881b 100644 --- a/src/API/Server/Backend/Backend.csproj +++ b/src/API/Server/Backend/Backend.csproj @@ -5,7 +5,8 @@ disable enable CodeProject.SenseAI.Server.Backend - CodeProject.SenseAI.Server.Backend + 1.2.2.0 + CodeProject.SenseAI.Server.Backend
@@ -29,6 +30,7 @@ + diff --git a/src/API/Server/Backend/BackendRouteMap.cs b/src/API/Server/Backend/BackendRouteMap.cs index 5061b6c8..c3aafdea 100644 --- a/src/API/Server/Backend/BackendRouteMap.cs +++ b/src/API/Server/Backend/BackendRouteMap.cs @@ -1,36 +1,98 @@ using System; using System.Collections.Concurrent; using System.Collections.Generic; +using System.Linq; using System.Text; +using System.Text.Json.Serialization; namespace CodeProject.SenseAI.Server.Backend { + [JsonConverter(typeof(JsonStringEnumConverter))] + public enum RouteParameterType + { + Text, + Integer, + Float, + Boolean, + File, + Object + } + + public struct RouteParameterInfo + { + /// + /// Gets the Name of the parameter. + /// + public string Name { get; set; } + + /// + /// Gets the type of the parameter. + /// + public RouteParameterType Type { get; set; } + + /// + /// Get the description of the parameter. + /// + public string Description { get; set; } + } /// /// Holds the queue and command associated with a url. /// + // TODO: this should be a Record. public struct BackendRouteInfo { /// - /// Gets the name of the queue. + /// Gets the Path for the endpoint. /// - public string Queue { get; } + public string Path { get; set; } + + /// + /// Gets the name of the queue used by this endpoint. + /// + public string Queue { get; set; } /// /// Gets the name of the command. /// - public string Command { get; } + public string Command { get; set; } + + /// + /// Get the description of the endpoint. + /// + public string? Description { get; set; } + + /// + /// Gets the inputs parameter information. + /// + public RouteParameterInfo[]? Inputs { get; set; } + + /// + /// Gets the output parameter information. + /// + public RouteParameterInfo[]? Outputs { get; set; } /// /// Initializes a new instance of the BackendRouteInfo struct. /// - /// THe name of the Queue that the route will use. - /// The command string that will be passed as part of the data + /// The relative path of the endpoint. + /// THe name of the Queue that the route will use. + /// The command string that will be passed as part of the data /// sent to the queue. - public BackendRouteInfo(string queueName, string command) + /// A Description of the endpoint. + /// The input parameters information. + /// The output parameters information. + public BackendRouteInfo(string Path, string Queue, string Command, + string? Description = null, + RouteParameterInfo[]? Inputs = null, + RouteParameterInfo[]? Outputs = null) { - Queue = queueName; - Command = command; + this.Path = Path.ToLower(); + this.Queue = Queue; + this.Command = Command; + this.Description = Description; + this.Inputs = Inputs; + this.Outputs = Outputs; } } @@ -61,7 +123,22 @@ public bool TryGetValue(string path, out BackendRouteInfo routeInfo) /// The command that will be passed with the payload. public void Register(string path, string queueName, string command) { - _urlCommandMap[path.ToLower()] = new BackendRouteInfo(queueName, command); + BackendRouteInfo backendRouteInfo = new BackendRouteInfo(path, queueName, command); + Register(backendRouteInfo); + } + + public void Register(BackendRouteInfo info) + { + _urlCommandMap[info.Path] = info; + } + + /// + /// Gets a list of the url registrations. + /// + /// An IEnumerable> of the registrations. 
+ public IEnumerable List() + { + return _urlCommandMap.Values.OrderBy(x => x.Path); } } } diff --git a/src/API/Server/Backend/CommandDispatcher.cs b/src/API/Server/Backend/CommandDispatcher.cs new file mode 100644 index 00000000..e5624b71 --- /dev/null +++ b/src/API/Server/Backend/CommandDispatcher.cs @@ -0,0 +1,78 @@ +// No longer used + +using Microsoft.AspNetCore.Http; +using Microsoft.Extensions.Options; + +using System; +using System.IO; +using System.Threading; +using System.Threading.Tasks; + +using CodeProject.SenseAI.AnalysisLayer.SDK; + +namespace CodeProject.SenseAI.API.Server.Backend +{ + /// + /// Creates and Dispatches commands appropriately. + /// + public class CommandDispatcher + { + private readonly QueueServices _queueServices; + private readonly QueueProcessingOptions _settings; + + /// + /// Initializes a new instance of the CommandDispatcher class. + /// + /// The Queue Services. + /// The Backend Options. + public CommandDispatcher(QueueServices queueServices, IOptions options) + { + _queueServices = queueServices; + _settings = options.Value; + } + + /* Currently unused, but we'll keep the code for the future just in case + /// + /// Saves a Form File to the temp directory. + /// + /// The Form File. + /// The saved file name. + public async Task SaveFileToTempAsync(IFormFile image) + { + string filename = $"{Guid.NewGuid():B}{Path.GetExtension(image.FileName)}"; + string tempDir = Path.GetTempPath(); + string dirPath = Path.Combine(tempDir, _settings.ImageTempDir); + + var directoryInfo = new DirectoryInfo(dirPath); + if (!directoryInfo.Exists) + directoryInfo.Create(); + + var filePath = Path.Combine(tempDir, _settings.ImageTempDir, filename); + var fileInfo = new FileInfo(filePath); + + try + { + using var imageStream = image.OpenReadStream(); + using var fileStream = fileInfo.OpenWrite(); + await imageStream.CopyToAsync(fileStream).ConfigureAwait(false); + } + catch + { + return null; + } + + return filePath; + } + */ + + public async Task QueueRequest(string queueName, string command, RequestPayload payload, + CancellationToken token = default) + { + var response = await _queueServices.SendRequestAsync(queueName, + new BackendRequest (command, payload), + token); + + return response; + } + } +} diff --git a/src/API/Server/Backend/QueueProcessingExtensions.cs b/src/API/Server/Backend/QueueProcessingExtensions.cs index 96ca5064..690e79f8 100644 --- a/src/API/Server/Backend/QueueProcessingExtensions.cs +++ b/src/API/Server/Backend/QueueProcessingExtensions.cs @@ -9,8 +9,7 @@ public static class QueueProcessingExtensions public static IServiceCollection AddQueueProcessing(this IServiceCollection services) { services.AddSingleton(); - services.AddSingleton(); - services.AddSingleton(); + services.AddSingleton(); services.AddSingleton(); return services; } diff --git a/src/API/Server/Backend/BackendOptions.cs b/src/API/Server/Backend/QueueProcessingOptions.cs similarity index 83% rename from src/API/Server/Backend/BackendOptions.cs rename to src/API/Server/Backend/QueueProcessingOptions.cs index 89935da2..9c27e234 100644 --- a/src/API/Server/Backend/BackendOptions.cs +++ b/src/API/Server/Backend/QueueProcessingOptions.cs @@ -5,7 +5,7 @@ namespace CodeProject.SenseAI.API.Server.Backend /// /// Options for Queue Processing /// - public class BackendOptions + public class QueueProcessingOptions { /// /// Get or set the max time to get a response. 
@@ -18,6 +18,8 @@ public class BackendOptions /// public int MaxQueueLength { get; set; } = 20; + /* Currently unused, but we'll keep the code for the future just in case public string ImageTempDir { get; set; } = "CodeProject.SenseAI.TempImages"; + */ } } diff --git a/src/API/Server/Backend/QueueServices.cs b/src/API/Server/Backend/QueueServices.cs index 927e242f..96eb3e9d 100644 --- a/src/API/Server/Backend/QueueServices.cs +++ b/src/API/Server/Backend/QueueServices.cs @@ -1,5 +1,4 @@  -using CodeProject.SenseAI.API.Common; using Microsoft.Extensions.Options; @@ -10,6 +9,9 @@ using System.Threading.Channels; using System.Threading.Tasks; +using CodeProject.SenseAI.AnalysisLayer.SDK; +using CodeProject.SenseAI.API.Common; + namespace CodeProject.SenseAI.API.Server.Backend { /// @@ -17,14 +19,14 @@ namespace CodeProject.SenseAI.API.Server.Backend /// public class QueueServices { - private readonly BackendOptions _settings; + private readonly QueueProcessingOptions _settings; // Keeping track of the queues being used. Will be created as needed. private readonly ConcurrentDictionary> _queues = new ConcurrentDictionary>(); private readonly ConcurrentDictionary> _pendingResponses = new ConcurrentDictionary>(); - public QueueServices(IOptions options) + public QueueServices(IOptions options) { _settings = options.Value; } diff --git a/src/API/Server/Backend/Text/TextBackendRequests.cs b/src/API/Server/Backend/Text/TextBackendRequests.cs deleted file mode 100644 index d0320187..00000000 --- a/src/API/Server/Backend/Text/TextBackendRequests.cs +++ /dev/null @@ -1,40 +0,0 @@ -// NO LONGER USED. - -namespace CodeProject.SenseAI.API.Server.Backend -{ -#pragma warning disable IDE1006 // Naming Styles - - /// - /// For Text Summary requests - /// - public class BackendTextSummaryRequest : BackendRequestBase - { - /// - /// The text to summarise. - /// - public string? text { get; set; } - - /// - /// The number of sentences to generate. - /// - public float? numsentences { get; set; } - - /// - /// Constructor - /// - public BackendTextSummaryRequest() - { - reqtype = "textsummary"; - } - - /// - /// Constructor - /// - public BackendTextSummaryRequest(string? text, int? numsentences) : this() - { - this.text = text; - this.numsentences = numsentences; - } - } -#pragma warning restore IDE1006 // Naming Styles -} \ No newline at end of file diff --git a/src/API/Server/Backend/Text/TextBackendResponses.cs b/src/API/Server/Backend/Text/TextBackendResponses.cs deleted file mode 100644 index 20100a09..00000000 --- a/src/API/Server/Backend/Text/TextBackendResponses.cs +++ /dev/null @@ -1,17 +0,0 @@ -// NO LONGER USED. - -namespace CodeProject.SenseAI.API.Server.Backend -{ -#pragma warning disable IDE1006 // Naming Styles - /// - /// Text Summary Response - /// - public class BackendTextSummaryResponse : BackendSuccessResponse - { - /// - /// Gets or sets the confidence in the recognition response - /// - public string? summary { get; set; } - } -#pragma warning restore IDE1006 // Naming Styles -} diff --git a/src/API/Server/Backend/Text/TextCommandDispatcher.cs b/src/API/Server/Backend/Text/TextCommandDispatcher.cs deleted file mode 100644 index 74e4963b..00000000 --- a/src/API/Server/Backend/Text/TextCommandDispatcher.cs +++ /dev/null @@ -1,61 +0,0 @@ -// NO LONGER USED. - -using Microsoft.Extensions.Options; - -using System; -using System.Threading; -using System.Threading.Tasks; - -namespace CodeProject.SenseAI.API.Server.Backend -{ - /// - /// Creates and Dispatches commands appropriately. 
- /// - public class TextCommandDispatcher - { - private const string SummaryQueueName = "summary_queue"; - - private readonly QueueServices _queueServices; - private readonly BackendOptions _settings; - - /// - /// Initializes a new instance of the CommandDispatcher class. - /// - /// The Queue Services. - /// The Backend Options. - public TextCommandDispatcher(QueueServices queueServices, IOptions options) - { - _queueServices = queueServices; - _settings = options.Value; - } - - /// - /// Executes a Summarize Text Command. - /// - /// The text to sumamrise. - /// The number of sentences to produce. - /// A Cancellation Token (optional). - /// A list of the detected objects or an error response. - public async Task SummarizeText(string? text, int numberOfSentences, - CancellationToken token = default) - { - if (string.IsNullOrWhiteSpace(text)) - return new BackendErrorResponse(-1, "No text was provided"); - - if (numberOfSentences <= 0) - return new BackendErrorResponse(-1, "Number of sentences to produce is invalid"); - - try - { - var response = await _queueServices.SendRequestAsync(SummaryQueueName, - new BackendTextSummaryRequest(text, numberOfSentences), - token).ConfigureAwait(false); - return response; - } - catch - { - return new BackendErrorResponse(-1, "Unable to summarize the text"); - } - } - } -} diff --git a/src/API/Server/Backend/Vision/VisionBackendRequests.cs b/src/API/Server/Backend/Vision/VisionBackendRequests.cs deleted file mode 100644 index 8782ca31..00000000 --- a/src/API/Server/Backend/Vision/VisionBackendRequests.cs +++ /dev/null @@ -1,217 +0,0 @@ -// NO LONGER USED. - -namespace CodeProject.SenseAI.API.Server.Backend -{ -#pragma warning disable IDE1006 // Naming Styles - - /// - /// For Object detection requests - /// - public class BackendObjectDetectionRequest : BackendRequestBase - { - /// - /// The image id. - /// - public string? imgid { get; set; } - - /// - /// The minimum confidence bar for a positive match to be determined. - /// - public float? minconfidence { get; set; } - - /// - /// Constructor - /// - public BackendObjectDetectionRequest() - { - reqtype = "detection"; - } - - /// - /// Constructor - /// - public BackendObjectDetectionRequest(string? imageId, float? minimumConfidence) : this() - { - imgid = imageId; - minconfidence = minimumConfidence; - } - } - - /// - /// For Face detection requests - /// - public class BackendFaceDetectionRequest : BackendRequestBase - { - /// - /// The image id. - /// - public string? imgid { get; set; } - - /// - /// The minimum confidence bar for a positive match to be determined. - /// - public float? minconfidence { get; set; } - - /// - /// Constructor - /// - public BackendFaceDetectionRequest() - { - reqtype = "detect"; - } - - /// - /// Constructor - /// - public BackendFaceDetectionRequest(string? imageId, float? minimumConfidence) : this() - { - imgid = imageId; - minconfidence = minimumConfidence; - } - } - - public class BackendFaceMatchRequest : BackendRequestBase - { - public string[]? images { get; set; } - - /// - /// Constructor - /// - public BackendFaceMatchRequest() - { - reqtype = "match"; - } - - /// - /// Constructor - /// - public BackendFaceMatchRequest(string image1Id, string image2Id) : this() - { - images = new string[] { image1Id, image2Id }; - } - } - - public class BackendSceneDetectionRequest : BackendRequestBase - { - /// - /// The image id. - /// - public string? 
imgid { get; set; } - - /// - /// Constructor - /// - public BackendSceneDetectionRequest() - { - reqtype = "detection"; - } - - /// - /// Constructor - /// - public BackendSceneDetectionRequest(string? imageId) : this() - { - imgid = imageId; - } - } - - public class BackendFaceRegisterRequest : BackendRequestBase - { - /// - /// Gets or sets the id of the user for whom the images represent - /// - public string? userid { get; set; } - - /// - /// Gets or sets the array of image Ids that were registered - /// - public string[]? images { get; set; } - - /// - /// Constructor - /// - public BackendFaceRegisterRequest() - { - reqtype = "register"; - } - - /// - /// Constructor - /// - public BackendFaceRegisterRequest(string userid, string[] imageids) : this() - { - this.userid = userid; - this.images = imageids; - } - } - - public class BackendFaceListRequest : BackendRequestBase - { - /// - /// Constructor - /// - public BackendFaceListRequest() - { - reqtype = "list"; - } - } - - public class BackendFaceDeleteRequest : BackendRequestBase - { - /// - /// Gets or sets the id of the user for whom the images represent - /// - public string? userid { get; set; } - - /// - /// Constructor - /// - public BackendFaceDeleteRequest() - { - reqtype = "delete"; - } - - /// - /// Constructor - /// - public BackendFaceDeleteRequest(string userid) : this() - { - this.userid = userid; - } - } - - /// - /// For face recognition requests - /// - public class BackendFaceRecognitionRequest : BackendRequestBase - { - /// - /// The image id. - /// - public string? imgid { get; set; } - - /// - /// The minimum confidence bar for a positive match to be determined. - /// - public float? minconfidence { get; set; } - - /// - /// Constructor - /// - public BackendFaceRecognitionRequest() - { - reqtype = "recognize"; - } - - /// - /// Constructor - /// - public BackendFaceRecognitionRequest(string? imageId, float? minimumConfidence) : this() - { - imgid = imageId; - minconfidence = minimumConfidence; - } - } - -#pragma warning restore IDE1006 // Naming Styles -} \ No newline at end of file diff --git a/src/API/Server/Backend/Vision/VisionBackendResponses.cs b/src/API/Server/Backend/Vision/VisionBackendResponses.cs deleted file mode 100644 index 47272ab5..00000000 --- a/src/API/Server/Backend/Vision/VisionBackendResponses.cs +++ /dev/null @@ -1,151 +0,0 @@ -// NO LONGER USED. - -namespace CodeProject.SenseAI.API.Server.Backend -{ -#pragma warning disable IDE1006 // Naming Styles - /// - /// Face Recognition Response - /// - public class BackendRecognitionResponse : BackendSuccessResponse - { - /// - /// Gets or sets the confidence in the recognition response - /// - public float confidence { get; set; } - - /// - /// Gets or sets the label to apply to the detected item - /// - public string? label { get; set; } - } - - /// - /// Face Recognition Response - /// - public class BackendSceneDetectResponse : BackendSuccessResponse - { - /// - /// Gets or sets the confidence in the recognition response - /// - public float confidence { get; set; } - - /// - /// Gets or sets the label to apply to the detected item - /// - public string? label { get; set; } - } - - /// - /// Face Match Response. - /// - public class BackendFaceMatchResponse : BackendSuccessResponse - { - /// - /// Gets or sets the similarity in an object comparison response - /// - public float similarity { get; set; } - } - - /// - /// A bounding box with confidence level. 
- /// - public class BoundingBoxPrediction - { - /// - /// Gets or sets the confidence in the detection response - /// - public float confidence { get; set; } - - /// - /// Gets or sets the lower y coordinate of the bounding box - /// - public int y_min { get; set; } - - /// - /// Gets or sets the lower x coordinate of the bounding box - /// - public int x_min { get; set; } - - /// - /// Gets or sets the upper y coordinate of the bounding box - /// - public int y_max { get; set; } - - /// - /// Gets or sets the upper x coordinate of the bounding box - /// - public int x_max { get; set; } - } - - /// - /// A Face Detection Prediction. - /// - public class FaceDetectionPrediction : BoundingBoxPrediction - { - } - - /// - /// A Face Recognition Prediction. - /// - public class FaceRecognitionPrediction : BoundingBoxPrediction - { - public string? userid { get; set; } - } - - /// - /// An Object Detection Prediction. - /// - public class DetectionPrediction : BoundingBoxPrediction - { - public string? label { get; set; } - } - - /// - /// A Registered Face Delete Response. - /// - public class BackendFaceDeleteResponse : BackendSuccessResponse - { - } - - /// - /// A Face Registration Response. - /// - public class BackendFaceRegisterResponse : BackendSuccessResponse - { - public string? message { get; set; } - } - - /// - /// A Face Detection Response. - /// - public class BackendFaceDetectionResponse : BackendSuccessResponse - { - public FaceDetectionPrediction[]? predictions { get; set; } - } - - /// - /// A Face Recognition Response. - /// - public class BackendFaceRecognitionResponse : BackendSuccessResponse - { - public FaceRecognitionPrediction[]? predictions { get; set; } - } - - /// - /// A List Registered Face Response - /// - public class BackendListRegisteredFacesResponse : BackendSuccessResponse - { - public string[]? faces { get; set; } - } - - /// - /// An Object Detection Response. - /// - public class BackendObjectDetectionResponse : BackendSuccessResponse - { - public DetectionPrediction[]? predictions { get; set; } - } - -#pragma warning restore IDE1006 // Naming Styles -} diff --git a/src/API/Server/Backend/Vision/VisionCommandDispatcher.cs b/src/API/Server/Backend/Vision/VisionCommandDispatcher.cs deleted file mode 100644 index 75e918ff..00000000 --- a/src/API/Server/Backend/Vision/VisionCommandDispatcher.cs +++ /dev/null @@ -1,312 +0,0 @@ -using Microsoft.AspNetCore.Http; -using Microsoft.Extensions.Options; - -using System; -using System.Collections.Generic; -using System.IO; -using System.Threading; -using System.Threading.Tasks; - -namespace CodeProject.SenseAI.API.Server.Backend -{ - /// - /// Creates and Dispatches commands appropriately. - /// - public class VisionCommandDispatcher - { - private const string SceneQueueName = "scene_queue"; - private const string DetectionQueueName = "detection_queue"; - private const string FaceQueueName = "face_queue"; - - private readonly QueueServices _queueServices; - private readonly BackendOptions _settings; - - /// - /// Initializes a new instance of the CommandDispatcher class. - /// - /// The Queue Services. - /// The Backend Options. - public VisionCommandDispatcher(QueueServices queueServices, IOptions options) - { - _queueServices = queueServices; - _settings = options.Value; - } - - /// - /// Saves a Form File to the temp directory. - /// - /// The Form File. - /// The saved file name. 
- public async Task SaveFileToTempAsync(IFormFile image) - { - string filename = $"{Guid.NewGuid():B}{Path.GetExtension(image.FileName)}"; - string tempDir = Path.GetTempPath(); - string dirPath = Path.Combine(tempDir, _settings.ImageTempDir); - - var directoryInfo = new DirectoryInfo(dirPath); - if (!directoryInfo.Exists) - directoryInfo.Create(); - - var filePath = Path.Combine(tempDir, _settings.ImageTempDir, filename); - var fileInfo = new FileInfo(filePath); - - try - { - using var imageStream = image.OpenReadStream(); - using var fileStream = fileInfo.OpenWrite(); - await imageStream.CopyToAsync(fileStream).ConfigureAwait(false); - } - catch - { - return null; - } - - return filePath; - } - - public async Task QueueRequest(string queueName, string command, RequestPayload payload, - CancellationToken token = default) - { - var response = await _queueServices.SendRequestAsync(queueName, - new BackendRequest (command, payload), - token); - - return response; - } - - // TODO: All of these methods need to be simplified down to a single method, so that - // instead of "DetectObjects" we have the method "Infer" that accepts a payload of - // incoming information (binary data as a filename, confidence settings as floats - - // whatever info is required) and a queue name. This info is placed into the queue, - // and the result then collected and passed that back to the caller as a dynamic or - // JsonElement. - // This will allow us to dynamically add queues and methods via configuration instead - // of hard coding. - - /// - /// Executes a Detect Objects Command. - /// - /// The Form File for the image to be processed. - /// The minimum confidence for the detected objects. - /// A Cancellation Token (optional). - /// A list of the detected objects or an error response. - public async Task DetectObjects(IFormFile image, float? minConfidence, - CancellationToken token = default) - { - string? filename = await SaveFileToTempAsync(image).ConfigureAwait(false); - if (filename == null) - return new BackendErrorResponse(-1, "Unable to save file"); - - try - { - var response = await _queueServices.SendRequestAsync(DetectionQueueName, - new BackendObjectDetectionRequest(filename, minConfidence ?? 0.45F), - token).ConfigureAwait(false); - return response; - } - finally - { - var fileInfo = new FileInfo(filename); - fileInfo.Delete(); - } - } - - /// - /// Executes a Detect Scene Command. - /// - /// The Form File for the image to be processed. - /// A Cancellation Token (optional). - /// A label and confidence for the detected scene or an error response. - public async Task DetectScene(IFormFile image, - CancellationToken token = default) - { - string? filename = await SaveFileToTempAsync(image).ConfigureAwait(false); - if (filename == null) - return new BackendErrorResponse(-1, "Unable to save file"); - - try - { - var response = await _queueServices.SendRequestAsync(SceneQueueName, - new BackendSceneDetectionRequest(filename), token).ConfigureAwait(false); - return response; - } - finally - { - var fileInfo = new FileInfo(filename); - fileInfo.Delete(); - } - } - - - /// - /// Executes a Detect Faces Command. - /// - /// The Form File for the image to be processed. - /// The minimum confidence for the detected objects. - /// A Cancellation Token (optional). - /// A list of the detected Faces or an error response. - public async Task DetectFaces(IFormFile image, float? minConfidence, - CancellationToken token = default) - { - string? 
filename = await SaveFileToTempAsync(image).ConfigureAwait(false); - if (filename == null) - return new BackendErrorResponse(-1, "Unable to save file"); - - try - { - var response = await _queueServices.SendRequestAsync(FaceQueueName, - new BackendFaceDetectionRequest(filename, minConfidence ?? 0.4F) - , token).ConfigureAwait(false); - return response; - } - finally - { - var fileInfo = new FileInfo(filename); - fileInfo.Delete(); - } - } - - /// - /// Executes a Match Faces Command. - /// - /// The Form File for the image to be processed. - /// The Form File for the image to be processed. - /// A Cancellation Token (optional). - /// A value indicating the similarity of the two faces. - public async Task MatchFaces(IFormFile image1, IFormFile image2, - CancellationToken token = default) - { - string? filename1 = await SaveFileToTempAsync(image1).ConfigureAwait(false); - if (filename1 == null) - return new BackendErrorResponse(-1, "Unable to save file1"); - - string? filename2 = await SaveFileToTempAsync(image2); - if (filename2== null) - return new BackendErrorResponse(-1, "Unable to save file2"); - - try - { - var response = await _queueServices.SendRequestAsync(FaceQueueName, - new BackendFaceMatchRequest(filename1, filename2) - , token).ConfigureAwait(false); - return response; - } - finally - { - // delete the temporary files - var fileInfo = new FileInfo(filename1); - fileInfo.Delete(); - fileInfo = new FileInfo(filename2); - fileInfo.Delete(); - } - } - - /// - /// Executes a Register Faces Command. - /// - /// The images are of this user. - /// The Form File images of the given user. - /// A Cancellation Token (optional). - /// A value indicating the similarity of the two faces. - public async Task RegisterFaces(string userId, - IFormFileCollection images, - CancellationToken token = default) - { - var filenames = new List(images.Count); - foreach (IFormFile? image in images) - { - if (image == null) - continue; - - string? filename = await SaveFileToTempAsync(image).ConfigureAwait(false); - if (filename == null) - return new BackendErrorResponse(-1, $"Unable to save {image.FileName}"); - - filenames.Add(filename); - } - - try - { - var response = await _queueServices.SendRequestAsync(FaceQueueName, - new BackendFaceRegisterRequest(userId, filenames.ToArray()), - token).ConfigureAwait(false); - return response; - } - finally - { - // delete the temporary files - foreach (var filename in filenames) - { - var fileInfo = new FileInfo(filename); - fileInfo.Delete(); - } - } - } - - /// - /// Executes a Recognize Faces Command. - /// - /// The image potentially containing faces. - /// The minimum confidence for the detected faces. - /// A Cancellation Token (optional). - /// A list of the recognized Faces or an error response. - public async Task RecognizeFaces(IFormFile image, float? minConfidence, - CancellationToken token = default) - { - string? filename = await SaveFileToTempAsync(image).ConfigureAwait(false); - if (filename == null) - return new BackendErrorResponse(-1, "Unable to save file"); - - try - { - var response = await _queueServices.SendRequestAsync(FaceQueueName, - new BackendFaceRecognitionRequest(filename, minConfidence ?? 0.67f) - , token).ConfigureAwait(false); - return response; - } - finally - { - var fileInfo = new FileInfo(filename); - fileInfo.Delete(); - } - } - - /// - /// Executes a List Faces Command. - /// - /// A Cancellation Token (optional). - /// A list of the registered Faces or an error response. 
- public async Task ListFaces(CancellationToken token = default) - { - try - { - var response = await _queueServices.SendRequestAsync(FaceQueueName, - new BackendFaceListRequest(), token).ConfigureAwait(false); - return response; - } - finally - { - } - } - - /// - /// Executes a Delete Faces Command. - /// - /// The id of the user whose face data will be deleted. - /// A Cancellation Token (optional). - /// A list of the recognized Faces or an error response. - public async Task DeleteFaces(string userid, - CancellationToken token = default) - { - try - { - var response = await _queueServices.SendRequestAsync(FaceQueueName, - new BackendFaceDeleteRequest(userid), token).ConfigureAwait(false); - return response; - } - finally - { - } - } - } -} diff --git a/src/API/Server/FrontEnd/BackendProcessRunner.cs b/src/API/Server/FrontEnd/BackendProcessRunner.cs index e556c5ff..d25b4f6d 100644 --- a/src/API/Server/FrontEnd/BackendProcessRunner.cs +++ b/src/API/Server/FrontEnd/BackendProcessRunner.cs @@ -28,60 +28,75 @@ public class BackendProcessRunner : BackgroundService const string RootPathMarker = "%ROOT_PATH%"; const string ModulesPathMarker = "%MODULES_PATH%"; const string PlatformMarker = "%PLATFORM%"; + const string DataDirMarker = "%DATA_DIR%"; const string PythonBasePathMarker = "%PYTHON_BASEPATH%"; - const string Python37PathMarker = "%PYTHON37_PATH%"; + const string PythonPathMarker = "%PYTHON_PATH%"; + const string PythonRuntimeMarker = "%PYTHON_RUNTIME%"; - private readonly FrontendOptions _options; + private readonly FrontendOptions _frontendOptions; + private readonly ModuleCollection _modules; private readonly IConfiguration _config; private readonly ILogger _logger; private readonly QueueServices _queueServices; private readonly BackendRouteMap _routeMap; - private readonly Dictionary _backendEnvironmentVars = new(); private readonly List _runningProcesses = new(); private readonly string? _appDataDirectory; + private readonly ModuleCollection _emptyModuleList = new ModuleCollection(); + /// /// Gets the current platform name /// - private string Platform + public static string Platform { get { + bool inDocker = (Environment.GetEnvironmentVariable("DOTNET_RUNNING_IN_CONTAINER") ?? "") == "true"; + if (inDocker) + return "Docker"; // which in our case implies that we are running in Linux + // RuntimeInformation.GetPlatform() or RuntimeInformation.Platform would have been - // too easy... + // too easy. if (RuntimeInformation.IsOSPlatform(OSPlatform.Windows)) - return OSPlatform.Windows.ToString(); + return "Windows"; if (RuntimeInformation.IsOSPlatform(OSPlatform.OSX)) - return OSPlatform.OSX.ToString(); + return "macOS"; if (RuntimeInformation.IsOSPlatform(OSPlatform.Linux)) - return OSPlatform.Linux.ToString(); + return "Linux"; if (RuntimeInformation.IsOSPlatform(OSPlatform.FreeBSD)) - return OSPlatform.FreeBSD.ToString(); + return "FreeBSD"; - return OSPlatform.Windows.ToString(); // Gotta be something... + return "Windows"; // Gotta be something... } } /// /// Gets a list of the startup processes. /// - public StartupProcess[] StartupProcesses + public ModuleCollection StartupProcesses { - get { return _options?.StartupProcesses ?? Array.Empty(); } + get { return _modules ?? _emptyModuleList; } } /// /// Gets a list of the processes names and statuses. /// - public Dictionary ProcessStatuses + public List ProcessStatuses { get { - return StartupProcesses.ToDictionary(cmd => cmd.Name ?? "Unknown", - cmd => cmd.Running ?? 
false); + return StartupProcesses.Select(entry => new ProcessStatus() + { + ModuleId = entry.Key ?? "Unknown", + Name = entry.Value.Name, + Started = entry.Value.Started, + LastSeen = entry.Value.LastSeen, + Running = entry.Value.Running, + Processed = entry.Value.Processed ?? 0 + }).ToList(); } } @@ -92,25 +107,30 @@ public Dictionary ProcessStatuses /// The status for the backend process, or false if the queue is invalid. public bool GetStatusForQueue(string queueName) { - return StartupProcesses.FirstOrDefault(cmd => cmd.Queues!.Any(x => string.Compare(x, queueName, true) == 0)) - ?.Running ?? false; + return StartupProcesses.FirstOrDefault(entry => + entry.Value.RouteMaps! + .Any(x => string.Compare(x.Queue, queueName, true) == 0) + ).Value?.Running ?? false; } /// /// Initialises a new instance of the BackendProcessRunner. /// /// The FrontendOptions + /// The Modules configuration. /// The application configuration. /// The Queue management service. /// The RouteMap service. /// The logger. public BackendProcessRunner(IOptions options, + IOptions modules, IConfiguration config, QueueServices queueServices, BackendRouteMap routeMap, ILogger logger) { - _options = options.Value; + _frontendOptions = options.Value; + _modules = modules.Value; _config = config; _logger = logger; _queueServices = queueServices; @@ -120,7 +140,6 @@ public BackendProcessRunner(IOptions options, _appDataDirectory = config.GetValue("ApplicationDataDir"); ExpandMacros(); - BuildBackendEnvironmentVar(); } /// @@ -153,7 +172,7 @@ protected override async Task ExecuteAsync(CancellationToken stoppingToken) try { - if (_options.StartupProcesses is null) + if (_modules is null) { _logger.LogInformation("No Background AI Modules specified"); Logger.Log("No Background AI Modules specified"); @@ -169,10 +188,10 @@ protected override async Task ExecuteAsync(CancellationToken stoppingToken) } // Setup routes. Do this first so they are active during debug without launching services. - foreach (var cmdInfo in _options.StartupProcesses!) + foreach (var cmdInfo in _modules!.Values) { // setup the routes for this module. - if (cmdInfo.Activate ?? false) + if (IsEnabled(cmdInfo)) { if (!(cmdInfo.RouteMaps?.Any() ?? false)) { @@ -181,7 +200,7 @@ protected override async Task ExecuteAsync(CancellationToken stoppingToken) else { foreach (var routeInfo in cmdInfo.RouteMaps!) - _routeMap.Register(routeInfo.Path, routeInfo.Queue, routeInfo.Command); + _routeMap.Register(routeInfo); } } } @@ -201,60 +220,39 @@ protected override async Task ExecuteAsync(CancellationToken stoppingToken) if (loggerIsValid) { - _logger.LogInformation($"Root Path: {_options.ROOT_PATH}"); - _logger.LogInformation($"Module Path: {_options.MODULES_PATH}"); - _logger.LogInformation($"Python3.7 Path: {_options.PYTHON37_PATH}"); - _logger.LogInformation($"Image Temp Dir: {Path.GetTempPath()}"); - - Logger.Log($"App directory {_options.ROOT_PATH}"); - Logger.Log($"Analysis modules in {_options.MODULES_PATH}"); + _logger.LogInformation($"Root Path: {_frontendOptions.ROOT_PATH}"); + _logger.LogInformation($"Module Path: {_frontendOptions.MODULES_PATH}"); + _logger.LogInformation($"Python Path: {_frontendOptions.PYTHON_PATH}"); + _logger.LogInformation($"Temp Dir: {Path.GetTempPath()}"); + _logger.LogInformation($"Data Dir: {_appDataDirectory}"); + + Logger.Log($"App directory {_frontendOptions.ROOT_PATH}"); + Logger.Log($"Analysis modules in {_frontendOptions.MODULES_PATH}"); } - foreach (var cmdInfo in _options.StartupProcesses!) 
+ foreach (var entry in _modules!) { - cmdInfo.Running = false; - + ModuleConfig? module = entry.Value; + string moduleId = entry.Key; + if (stoppingToken.IsCancellationRequested) break; - bool activate = cmdInfo.Activate ?? false; - bool enabled = activate; - - // TODO: remove the Enable Flags - foreach (var envVar in cmdInfo.EnableFlags) - enabled = enabled || _config.GetValue(envVar, false); + bool enabled = IsEnabled(module!); if (!enabled) - Logger.Log($"Not starting {cmdInfo.Name}: Not set as enabled"); + Logger.Log($"Not starting {module.Name}: Not set as enabled"); - if (enabled && !cmdInfo.Platforms!.Any(platform => platform.ToLower() == Platform.ToLower())) - { - enabled = false; - Logger.Log($"Not starting {cmdInfo.Name}: Not anabled for {Platform}"); - } - - if (enabled && !string.IsNullOrEmpty(cmdInfo.Command)) + if (enabled && !string.IsNullOrEmpty(module.FilePath)) { // _logger.LogError($"Starting {cmdInfo.Command}"); // create the required Queues - foreach (var queueName in cmdInfo.Queues) - if (!string.IsNullOrWhiteSpace(queueName)) - _queueServices.EnsureQueueExists(queueName); - - // Setup the process we're going to launch - ProcessStartInfo? procStartInfo = new ProcessStartInfo($"{cmdInfo.Command}", $"{cmdInfo.Args ?? ""}") - { - UseShellExecute = false, - WorkingDirectory = cmdInfo.WorkingDirectory, - CreateNoWindow = false, - RedirectStandardOutput = true, - RedirectStandardError = true - }; + foreach (var routeInfo in module.RouteMaps) + if (!string.IsNullOrWhiteSpace(routeInfo.Queue)) + _queueServices.EnsureQueueExists(routeInfo.Queue); - // Set the environment variables - foreach (var kv in _backendEnvironmentVars) - procStartInfo.Environment.TryAdd(kv.Key, kv.Value); + ProcessStartInfo procStartInfo = CreateProcessStartInfo(module, moduleId); // Start the process try @@ -263,13 +261,13 @@ protected override async Task ExecuteAsync(CancellationToken stoppingToken) _logger.LogInformation($"Starting {procStartInfo.FileName} {procStartInfo.Arguments}"); Process? process = new Process(); - process.StartInfo = procStartInfo; + process.StartInfo = procStartInfo; process.EnableRaisingEvents = true; process.OutputDataReceived += (sender, data) => { string filename = string.Empty; if (sender is Process process) - filename = Path.GetFileName(process.StartInfo.Arguments); + filename = Path.GetFileName(process.StartInfo.Arguments.Replace("\"", "")); Logger.Log(string.IsNullOrEmpty(filename) ? data.Data : filename + ": " + data.Data); }; @@ -277,7 +275,7 @@ protected override async Task ExecuteAsync(CancellationToken stoppingToken) { string filename = string.Empty; if (sender is Process process) - filename = Path.GetFileName(process.StartInfo.Arguments); + filename = Path.GetFileName(process.StartInfo.Arguments.Replace("\"","")); Logger.Log(string.IsNullOrEmpty(filename) ? 
data.Data : filename + ": " + data.Data); }; @@ -291,41 +289,39 @@ protected override async Task ExecuteAsync(CancellationToken stoppingToken) process.BeginErrorReadLine(); if (loggerIsValid) - _logger.LogInformation($"Started {cmdInfo.Name} backend"); + _logger.LogInformation($"Started {module.Name} backend"); _runningProcesses.Add(process); - cmdInfo.Running = true; + module.Started = DateTime.UtcNow; - Logger.Log($"Started {cmdInfo.Name}"); + Logger.Log($"Started {module.Name}"); } else { if (loggerIsValid) - _logger.LogError($"Unable to start {cmdInfo.Name} backend"); + _logger.LogError($"Unable to start {module.Name} backend"); - Logger.Log($"Unable to start {cmdInfo.Name}"); + Logger.Log($"Unable to start {module.Name}"); } } catch (Exception ex) { if (loggerIsValid) { - _logger.LogError(ex, $"Error trying to start { cmdInfo.Name}"); + _logger.LogError(ex, $"Error trying to start { module.Name}"); Console.WriteLine("-------------------------------------------------"); - Console.WriteLine($"Working: {cmdInfo.WorkingDirectory}"); - Console.WriteLine($"Command: {cmdInfo.Command}"); - Console.WriteLine($"Args: {cmdInfo.Args}"); + Console.WriteLine($"FilePath: {module.FilePath}"); Console.WriteLine("-------------------------------------------------"); } - Logger.Log($"Error running {cmdInfo.Command} {cmdInfo.Args}"); + Logger.Log($"Error running {module.FilePath}"); #if DEBUG - if (Platform == "windows") + if (Platform == "Windows") Logger.Log($" Run /Installers/Dev/setup_dev_env_win.bat"); else Logger.Log($" In /Installers/Dev/, run 'bash setup_dev_env_linux.sh'"); - Logger.Log($" ** Did you setup the Development environment?"); + Logger.Log($" ** Did you setup the Development environment?"); #else Logger.Log($"Please check the SenseAI installation completed successfully"); #endif @@ -334,45 +330,133 @@ protected override async Task ExecuteAsync(CancellationToken stoppingToken) } } + private ProcessStartInfo CreateProcessStartInfo(ModuleConfig module, string moduleId) + { + string? command = ExpandOption(module.Command) ?? + GetCommandByRuntime(module.Runtime) ?? + GetCommandByExtension(module.FilePath); + + // Correcting for cross platform (win = \, linux = /) + string filePath = Path.Combine(_frontendOptions.MODULES_PATH!, + module.FilePath!.Replace('\\', Path.DirectorySeparatorChar)); + string? workingDirectory = Path.GetDirectoryName(filePath); + + // Setup the process we're going to launch + ProcessStartInfo? procStartInfo = new ProcessStartInfo($"{command}", $"\"{filePath}\"") + { + UseShellExecute = false, + WorkingDirectory = workingDirectory, + CreateNoWindow = false, + RedirectStandardOutput = true, + RedirectStandardError = true + }; + + // Set the environment variables + Dictionary environmentVars = BuildBackendEnvironmentVar(module); + foreach (var kv in environmentVars) + procStartInfo.Environment.TryAdd(kv.Key, kv.Value); + + // Queue is currently route specific, so we can't do this at the moment + // procStartInfo.Environment.TryAdd("MODULE_QUEUE", cmdInfo.QueueName); + procStartInfo.Environment.TryAdd("MODULE_ID", moduleId); + + return procStartInfo; + } + + private bool IsEnabled(ModuleConfig module) + { + // Has it been explicitely activated? + bool enabled = module.Activate ?? false; + + // Check the EnableFlags as backup. 
TODO: remove the Enable Flags + if (module.EnableFlags?.Length > 0) + foreach (var envVar in module.EnableFlags) + enabled = enabled || _config.GetValue(envVar, false); + + // If the platform list doesn't include the current platform, then veto the activation + if (enabled && !module.Platforms!.Any(platform => platform.ToLower() == Platform.ToLower())) + enabled = false; + + return enabled; + } + + private string? GetCommandByRuntime(string? runtime) + { + if (runtime is null) + return null; + + runtime = runtime.ToLower(); + + // HACK: Ultimately we will have a set of "runtime" modules which will install and + // register the runtimes we use. The registration will include the runtime name + // (eg "python39") and the path to the runtime's launcher. For now we're going to + // just hardcode Python and .NET support. + + // If it is "Python" then use our default Python location (in this case, python 3.7) + if (runtime == "python") + runtime = "python37"; + + // If it is a PythonNN command then replace our marker in the default python path to + // match the requested interpreter location + if (runtime.StartsWith("python") && !runtime.StartsWith("python3.")) + return _frontendOptions.PYTHON_PATH?.Replace(PythonRuntimeMarker, + // HACK: on docker the python command is in the format of python3.N + Platform == "Docker" ? runtime.Replace("python3", "python3.") : runtime); + + if (runtime == "dotnet") + return "dotnet"; + + return null; + } + + private string? GetCommandByExtension(string? filename) + { + if (filename is null) + return null; + + // HACK: Ultimately we will have a set of "runtime" modules which will install and + // register the runtimes we use. The registration will include the runtime name + // (eg "dotnet") and the file extensions that the runtime can unambiguously handle. + // The "python39" runtime, for example, may want to register .py, but so would python37. + // "dotnet" is welcome to register .dll as long as no other runtime module wants .dll too. + + switch (Path.GetExtension(filename)) + { + case ".py": return GetCommandByRuntime("python"); + case ".dll": return "dotnet"; + default: + throw new Exception("If neither Runtime nor Command is specified then FilePath must have an extension of '.py' or '.dll'."); + } + } + /// /// Expands all the directory markers in the options. /// private void ExpandMacros() { - if (_options is null) + if (_frontendOptions is null) return; - // Quick Sanity check - // Console.WriteLine($"Initial ROOT_PATH = {_options.ROOT_PATH}"); - // Console.WriteLine($"Initial MODULES_PATH = {_options.MODULES_PATH}"); - // Console.WriteLine($"Initial PYTHON_BASEPATH = {_options.PYTHON_BASEPATH}"); - // Console.WriteLine($"Initial PYTHON37_PATH = {_options.PYTHON37_PATH}"); - - // For Macro expansion in appsettings settings we have PYTHON37_PATH which depends on + // For Macro expansion in appsettings settings we have PYTHON_PATH which depends on // PYTHON_BASEPATH which usually depends on MODULES_PATH and both depend on ROOT_PATH. // Get and expand each of these in the correct order. 
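// For example (an illustrative sketch using the values that appear in appsettings.json
// later in this change; absolute paths depend on where the server is installed):
//   MODULES_PATH    = "%ROOT_PATH%\AnalysisLayer"                 -> <root>\AnalysisLayer
//   PYTHON_BASEPATH = "%MODULES_PATH%\bin\%PYTHON_RUNTIME%\venv"  -> <root>\AnalysisLayer\bin\%PYTHON_RUNTIME%\venv
//   PYTHON_PATH     = "%PYTHON_BASEPATH%\scripts\Python"          -> ...\venv\scripts\Python
// Only %PYTHON_RUNTIME% is left unresolved at this point; GetCommandByRuntime later
// substitutes it per module (e.g. "python37") when the process is launched.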
- _options.ROOT_PATH = GetRootPath(_options.ROOT_PATH); - _options.MODULES_PATH = Path.GetFullPath(ExpandOption(_options.MODULES_PATH)!); - _options.PYTHON_BASEPATH = Path.GetFullPath(ExpandOption(_options.PYTHON_BASEPATH)!); - _options.PYTHON37_PATH = Path.GetFullPath(ExpandOption(_options.PYTHON37_PATH)!); + _frontendOptions.ROOT_PATH = GetRootPath(_frontendOptions.ROOT_PATH); + _frontendOptions.MODULES_PATH = Path.GetFullPath(ExpandOption(_frontendOptions.MODULES_PATH)!); + + _frontendOptions.PYTHON_BASEPATH = Path.GetFullPath(ExpandOption(_frontendOptions.PYTHON_BASEPATH)!); + _frontendOptions.PYTHON_PATH = ExpandOption(_frontendOptions.PYTHON_PATH); + + // Fix the slashes + if (_frontendOptions.PYTHON_PATH?.Contains(Path.DirectorySeparatorChar) ?? false) + _frontendOptions.PYTHON_PATH = Path.GetFullPath(_frontendOptions.PYTHON_PATH); Console.WriteLine("------------------------------------------------------------------"); - Console.WriteLine($"Expanded ROOT_PATH = {_options.ROOT_PATH}"); - Console.WriteLine($"Expanded MODULES_PATH = {_options.MODULES_PATH}"); - Console.WriteLine($"Expanded PYTHON_BASEPATH = {_options.PYTHON_BASEPATH}"); - Console.WriteLine($"Expanded PYTHON37_PATH = {_options.PYTHON37_PATH}"); + Console.WriteLine($"Expanded ROOT_PATH = {_frontendOptions.ROOT_PATH}"); + Console.WriteLine($"Expanded MODULES_PATH = {_frontendOptions.MODULES_PATH}"); + Console.WriteLine($"Expanded PYTHON_BASEPATH = {_frontendOptions.PYTHON_BASEPATH}"); + Console.WriteLine($"Expanded PYTHON_PATH = {_frontendOptions.PYTHON_PATH}"); Console.WriteLine("------------------------------------------------------------------"); - - if (_options.StartupProcesses is not null) - { - foreach (var backend in _options.StartupProcesses) - { - backend.Command = ExpandOption(backend.Command); - backend.WorkingDirectory = ExpandOption(backend.WorkingDirectory); - backend.Args = ExpandOption(backend.Args); - } - } } /// @@ -400,58 +484,6 @@ private string GetRootPath(string? configRootPath) // converts relative URLs and squashes the path to he correct absolute path rootPath = Path.GetFullPath(rootPath); return rootPath; - - /* ALTERNATIVE: We can dynamically hunt for the correct path if we're in the mood. - - // If we're in Development then we can dynamically find the correct root path. But this - // is fragile and a Really Bad Idea. Trust configuration values. The are adaptable. - string? aspNetEnv = Environment.GetEnvironmentVariable("ASPNETCORE_ENVIRONMENT"); - if (aspNetEnv != null && aspNetEnv == "Development") - { - // In dev we assume the application will be under the /working-dir/src/API/FrontEnd/ - // directory, buried deeeep in the /bin/Debug/net/ etc etc bowels of the folder - // system. Dig to the surface. - - DirectoryInfo currentDir = new(AppContext.BaseDirectory); - if (_options.API_DIRNAME != null) - { - // Grab a shovel and dig up towards the API directory - while (currentDir.Parent != null && - currentDir.Name.ToLower() != _options.API_DIRNAME.ToLower()) - { - currentDir = currentDir.Parent; - } - - // Up to the src directory - if (currentDir != null && currentDir.Parent != null) - currentDir = currentDir.Parent; - - // Up to the root directory - if (currentDir != null && currentDir.Parent != null) - currentDir = currentDir.Parent; - } - - if (string.IsNullOrEmpty(currentDir?.FullName)) // No luck. Fall back to default. 
- rootPath = defaultPath; - else - rootPath = currentDir.FullName; - } - else - { - // Check if the app's launch directory is "Server" meaning we're in production - DirectoryInfo currentDir = new(AppContext.BaseDirectory); - if (currentDir.Name.ToLower() == _options.SERVEREXE_DIRNAME && currentDir.Parent != null) - rootPath = currentDir.Parent.FullName; - else - rootPath = defaultPath; - } - - if (rootPath.StartsWith("..")) - rootPath = Path.Combine(AppContext.BaseDirectory, rootPath!); - - rootPath = Path.GetFullPath(rootPath); // converts ".."'s to the correct relative path - return rootPath; - */ } /// @@ -464,12 +496,14 @@ private string GetRootPath(string? configRootPath) if (value is null) return null; - value = value.Replace(ModulesPathMarker, _options.MODULES_PATH); - value = value.Replace(RootPathMarker, _options.ROOT_PATH); + value = value.Replace(ModulesPathMarker, _frontendOptions.MODULES_PATH); + value = value.Replace(RootPathMarker, _frontendOptions.ROOT_PATH); value = value.Replace(PlatformMarker, Platform.ToLower()); - value = value.Replace(PythonBasePathMarker, _options.PYTHON_BASEPATH); - value = value.Replace(Python37PathMarker, _options.PYTHON37_PATH); + value = value.Replace(PythonBasePathMarker, _frontendOptions.PYTHON_BASEPATH); + value = value.Replace(PythonPathMarker, _frontendOptions.PYTHON_PATH); + value = value.Replace(DataDirMarker, _appDataDirectory); + // Correct for cross platform (win = \, linux = /) value = value.Replace('\\', Path.DirectorySeparatorChar); return value; @@ -478,28 +512,35 @@ private string GetRootPath(string? configRootPath) /// /// Creates the collection of backend environment variables. /// - private void BuildBackendEnvironmentVar() + private Dictionary BuildBackendEnvironmentVar(ModuleConfig module) { - if (_options.BackendEnvironmentVariables != null) - { - foreach (var entry in _options.BackendEnvironmentVariables) - _backendEnvironmentVars.Add(entry.Key, ExpandOption(entry.Value.ToString())); - - // A bit of a hack for the Vision Python legacy module that requires a directory - // for storing a SQLite DB. We'll force it to store the data in the standard - // application data directory as per the current OS. This is required because the - // app may very well be installed in a directory that doesn't provide write - // permission. So: have the writes done in a spot where we know we have permission. 
- _backendEnvironmentVars["DATA_DIR"] = _appDataDirectory; - - Console.WriteLine("Setting Environment variables"); - Console.WriteLine("------------------------------------------------------------------"); - foreach (var envVar in _backendEnvironmentVars) + Dictionary processEnvironmentVars = new(); + + if (_frontendOptions.EnvironmentVariables is not null) + foreach (var entry in _frontendOptions.EnvironmentVariables) { - Console.WriteLine($"{envVar.Key.PadRight(16)} = {envVar.Value}"); + if (processEnvironmentVars.ContainsKey(entry.Key)) + processEnvironmentVars[entry.Key] = ExpandOption(entry.Value.ToString()); + else + processEnvironmentVars.Add(entry.Key, ExpandOption(entry.Value.ToString())); } - Console.WriteLine("------------------------------------------------------------------"); - } + + if (module.EnvironmentVariables is not null) + foreach (var entry in module.EnvironmentVariables) + { + if (processEnvironmentVars.ContainsKey(entry.Key)) + processEnvironmentVars[entry.Key] = ExpandOption(entry.Value.ToString()); + else + processEnvironmentVars.Add(entry.Key, ExpandOption(entry.Value.ToString())); + } + + Console.WriteLine($"Setting Environment variables for {module.Name}"); + Console.WriteLine("------------------------------------------------------------------"); + foreach (var envVar in processEnvironmentVars) + Console.WriteLine($"{envVar.Key.PadRight(16)} = {envVar.Value}"); + Console.WriteLine("------------------------------------------------------------------"); + + return processEnvironmentVars; } } @@ -518,6 +559,7 @@ public static IServiceCollection AddBackendProcessRunner(this IServiceCollection IConfiguration configuration) { services.Configure(configuration.GetSection("FrontEndOptions")); + services.Configure(configuration.GetSection("Modules")); services.AddHostedService(); return services; } diff --git a/src/API/Server/FrontEnd/Controllers/ProxyController.cs b/src/API/Server/FrontEnd/Controllers/ProxyController.cs index 3dceb9d0..cf78b170 100644 --- a/src/API/Server/FrontEnd/Controllers/ProxyController.cs +++ b/src/API/Server/FrontEnd/Controllers/ProxyController.cs @@ -10,6 +10,7 @@ using System.Text; using System.Threading.Tasks; +using CodeProject.SenseAI.AnalysisLayer.SDK; namespace CodeProject.SenseAI.API.Server.Frontend.Controllers { // ------------------------------------------------------------------------------ @@ -35,15 +36,15 @@ namespace CodeProject.SenseAI.API.Server.Frontend.Controllers [ApiController] public class ProxyController : ControllerBase { - private readonly VisionCommandDispatcher _dispatcher; - private readonly BackendRouteMap _routeMap; + private readonly CommandDispatcher _dispatcher; + private readonly BackendRouteMap _routeMap; /// /// Initializes a new instance of the VisionController class. /// /// The Command Dispatcher instance. /// The Route Manager - public ProxyController(VisionCommandDispatcher dispatcher, BackendRouteMap routeMap) + public ProxyController(CommandDispatcher dispatcher, BackendRouteMap routeMap) { _dispatcher = dispatcher; _routeMap = routeMap; @@ -76,7 +77,30 @@ public async Task Post(string path) return new ObjectResult(response); } else - return BadRequest(); + return NotFound(); + } + + /// + /// Associates a url and command with a queue. + /// + /// The endpoint information to register. + /// OK or Http Error Status response. 
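A minimal sketch of how a client might call this new registration endpoint. The route values below are illustrative, it assumes BackendRouteInfo exposes the same Path/Queue/Command members the old ApiRouteMap did, and it assumes the controller keeps a "v1" base route and the default port 5000 from appsettings.json:

    // Hypothetical stand-alone .NET 6 registration client (top-level statements)
    using System;
    using System.Net.Http;
    using System.Net.Http.Json;

    using var client = new HttpClient { BaseAddress = new Uri("http://localhost:5000/") };
    var route = new { Path = "image/portrait", Queue = "portraitfilter_queue", Command = "filter" };
    HttpResponseMessage result = await client.PostAsJsonAsync("v1/Register", route);
    Console.WriteLine(result.StatusCode);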
+ // TODO: Should add Name, Description, Inputs and Outputs to the registration + [HttpPost("Register")] + public IActionResult Register([FromBody] BackendRouteInfo routeInfo) + { + _routeMap.Register(routeInfo); + return Ok(); + } + + /// + /// List the URL registrations. + /// + /// The List of URL registrations. + [HttpGet("List")] + public IActionResult List() + { + return new ObjectResult(_routeMap.List()); } private RequestPayload CreatePayload(BackendRouteInfo routeInfo) @@ -86,6 +110,7 @@ private RequestPayload CreatePayload(BackendRouteInfo routeInfo) var payload = new RequestPayload { command = routeInfo.Command, + queue = routeInfo.Queue, values = requestValues, files = form.Files.Select(x => new RequestFormFile { diff --git a/src/API/Server/FrontEnd/Controllers/QueueController.cs b/src/API/Server/FrontEnd/Controllers/QueueController.cs index f76039a2..9e2637e3 100644 --- a/src/API/Server/FrontEnd/Controllers/QueueController.cs +++ b/src/API/Server/FrontEnd/Controllers/QueueController.cs @@ -1,10 +1,16 @@ -using Microsoft.AspNetCore.Http; -using Microsoft.AspNetCore.Mvc; - +using System; +using System.Collections.Generic; using System.IO; +using System.Linq; using System.Threading; using System.Threading.Tasks; +using Microsoft.AspNetCore.Http; +using Microsoft.AspNetCore.Mvc; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Hosting; + +using CodeProject.SenseAI.AnalysisLayer.SDK; using CodeProject.SenseAI.API.Server.Backend; namespace CodeProject.SenseAI.API.Server.Frontend.Controllers @@ -31,14 +37,22 @@ public QueueController(QueueServices queueService) /// Gets a command from the named queue if available. /// /// The name of the Queue. + /// The ID of the module making the request /// The aborted request token. /// The Request Object. [HttpGet("{name}", Name = "GetRequestFromQueue")] [Produces("application/json")] [ProducesResponseType(StatusCodes.Status200OK)] [ProducesResponseType(StatusCodes.Status400BadRequest)] - public async Task GetQueue(string name, CancellationToken token) + public async Task GetQueue([FromRoute] string name, [FromQuery] string moduleId, + CancellationToken token) { + // TODO: Get the id of the module that made this request from the 'moduleId' querystring + // parameter and store the current time in a map of {moduleId : report_time} for health + // reporting. + // string? moduleId = Request.QueryString.Value["moduleId"]; + UpdateProcessStatus(moduleId); + BackendRequestBase? response = await _queueService.DequeueRequestAsync(name, token); return new OkObjectResult(response); } @@ -47,11 +61,12 @@ public async Task GetQueue(string name, CancellationToken token) /// Sets the response for a command from the named queue if available. /// /// The id of the request the response is for. + /// The ID of the module making the request /// The Request Object. [HttpPost("{reqid}", Name = "SetResponseInQueue")] [ProducesResponseType(StatusCodes.Status200OK)] [ProducesResponseType(StatusCodes.Status400BadRequest)] - public async Task SetResponse(string reqid) + public async Task SetResponse(string reqid, [FromQuery] string moduleId) { string? 
response = null; using var bodyStream = HttpContext.Request.Body; @@ -61,11 +76,34 @@ public async Task SetResponse(string reqid) response = await textreader.ReadToEndAsync(); } + UpdateProcessStatus(moduleId, true); + var success = _queueService.SetResult(reqid, response); if (!success) return BadRequest("failure to set response."); else return Ok("Response saved."); } + + private void UpdateProcessStatus(string moduleId, bool incrementProcessCount = false) + { + if (string.IsNullOrEmpty(moduleId)) + return; + + // Get the backend processor (DI won't work here due to the order things get fired up + // in Main. + var backend = HttpContext.RequestServices.GetServices() + .OfType() + .FirstOrDefault(); + if (backend is null) + return; + + if (backend.StartupProcesses.ContainsKey(moduleId)) + { + backend.StartupProcesses[moduleId].LastSeen = DateTime.UtcNow; + if (incrementProcessCount) + backend.StartupProcesses[moduleId].Processed++; + } + } } } diff --git a/src/API/Server/FrontEnd/Controllers/StatusController.cs b/src/API/Server/FrontEnd/Controllers/StatusController.cs index 7708178c..f2b8f4a4 100644 --- a/src/API/Server/FrontEnd/Controllers/StatusController.cs +++ b/src/API/Server/FrontEnd/Controllers/StatusController.cs @@ -1,13 +1,13 @@ using System.Linq; +using System.Threading.Tasks; using Microsoft.AspNetCore.Http; using Microsoft.AspNetCore.Mvc; using Microsoft.Extensions.DependencyInjection; using Microsoft.Extensions.Hosting; +using Microsoft.AspNetCore.Hosting; using CodeProject.SenseAI.API.Common; -using System.Threading.Tasks; -using Microsoft.AspNetCore.Hosting; namespace CodeProject.SenseAI.API.Server.Frontend.Controllers { @@ -157,10 +157,13 @@ public ResponseBase ListAnalysisStatus() if (backend is null) return new ErrorResponse("Unable to locate backend services"); + if (backend.ProcessStatuses is null) + return new ErrorResponse("No backend processes have been registered"); + // List them out and return the status var response = new AnalysisServicesStatusResponse { - statuses = backend.ProcessStatuses.ToArray() + statuses = backend.ProcessStatuses }; return response; diff --git a/src/API/Server/FrontEnd/Controllers/TextController.cs b/src/API/Server/FrontEnd/Controllers/TextController.cs deleted file mode 100644 index 6c20d6bb..00000000 --- a/src/API/Server/FrontEnd/Controllers/TextController.cs +++ /dev/null @@ -1,73 +0,0 @@ -using Microsoft.AspNetCore.Http; -using Microsoft.AspNetCore.Mvc; - -using System.Threading.Tasks; -using System.Threading; - -using CodeProject.SenseAI.API.Server.Backend; -using CodeProject.SenseAI.API.Common; -using System; - -namespace CodeProject.SenseAI.API.Server.Frontend.Controllers -{ - /// - /// The Vision Operations - /// - [Route("v1/text")] - [ApiController] - // [DisableResponseChunking] - public class TextController : ControllerBase - { - private readonly TextCommandDispatcher _dispatcher; - - /// - /// Initializes a new instance of the VisionController class. - /// - /// The Command Dispatcher instance. - public TextController(TextCommandDispatcher dispatcher) - { - _dispatcher = dispatcher; - } - - /// - /// Summarizes text. - /// - /// The Form file object. - /// The number of sentences to produce for the summary. - /// The injected request aborted cancellation token. - /// A Response containing the summary of the text. - /// Returns text summary, if any. - /// If the no text provided. 
- [HttpPost("summarize", Name = "SummarizeText")] - [Consumes("multipart/form-data")] - [Produces("application/json")] - [ProducesResponseType(StatusCodes.Status200OK)] - [ProducesResponseType(StatusCodes.Status400BadRequest)] - public async Task SummarizeText([FromForm] string? text, - [FromForm] int? num_sentences, - CancellationToken token) - { - var backendResponse = await _dispatcher.SummarizeText(text, num_sentences ?? 2, token); - - if (backendResponse is BackendTextSummaryResponse summaryResponse) - { - var response = new TextSummaryResponse - { - summary = summaryResponse.summary - }; - - return response; - } - - return HandleErrorResponse(backendResponse); - } - - private static ErrorResponse HandleErrorResponse(Object backendResponse) - { - if (backendResponse is BackendErrorResponse errorResponse) - return new ErrorResponse(errorResponse.error, errorResponse.code); - - return new ErrorResponse("unexpected response", -1); - } - } -} diff --git a/src/API/Server/FrontEnd/Controllers/VisionController.cs b/src/API/Server/FrontEnd/Controllers/VisionController.cs deleted file mode 100644 index cf07e355..00000000 --- a/src/API/Server/FrontEnd/Controllers/VisionController.cs +++ /dev/null @@ -1,332 +0,0 @@ -using Microsoft.AspNetCore.Http; -using Microsoft.AspNetCore.Mvc; - -using System.Threading.Tasks; -using System.Threading; -using System.Linq; - -using CodeProject.SenseAI.API.Server.Backend; -using CodeProject.SenseAI.API.Common; -using System; - -namespace CodeProject.SenseAI.API.Server.Frontend.Controllers -{ - /// - /// The Vision Operations - /// - [Route("v1/vision")] - [ApiController] - [DisableResponseChunking] - public class VisionController : ControllerBase - { - private readonly VisionCommandDispatcher _dispatcher; - - /// - /// Initializes a new instance of the VisionController class. - /// - /// The Command Dispatcher instance. - public VisionController(VisionCommandDispatcher dispatcher) - { - _dispatcher = dispatcher; - } - - /// - /// Detect the Scene from an image. - /// - /// The Form file object. - /// The injected request aborted cancellation token. - /// A Response describing the scene with confidence level. - /// Returns detected scene information, if any. - /// If the image in the Fomm data is null. - [HttpPost("scene", Name = "DetectScene")] - [Consumes("multipart/form-data")] - [Produces("application/json")] - [ProducesResponseType(StatusCodes.Status200OK)] - [ProducesResponseType(StatusCodes.Status400BadRequest)] - public async Task DetectScene([FromForm] IFormFile image, - CancellationToken token) - { - var backendResponse = await _dispatcher.DetectScene(image, token); - - if (backendResponse is BackendSceneDetectResponse detectResponse) - { - var response = new DetectSceneResponse - { - confidence = detectResponse.confidence, - label = detectResponse.label - }; - - return response; - } - - return HandleErrorResponse(backendResponse); - } - - private static ErrorResponse HandleErrorResponse(Object backendResponse) - { - if (backendResponse is BackendErrorResponse errorResponse) - return new ErrorResponse(errorResponse.error, errorResponse.code); - - return new ErrorResponse("unexpected response", -1); - } - - /// - /// Detect objects in an image. - /// - /// The Form file object. - /// The minimum confidence level. Defaults to 0.4. - /// The injected request aborted cancellation token. - /// A list of object names, positions, and confidence levels. - /// Returns the list of detected object information, if any. - /// If the image in the Form data is null. 
- [HttpPost("detection", Name = "DetectObjects")] - [Consumes("multipart/form-data")] - [Produces("application/json")] - [ProducesResponseType(StatusCodes.Status200OK)] - [ProducesResponseType(StatusCodes.Status400BadRequest)] - public async Task DetectObjects([FromForm] IFormFile image, - [FromForm] float? min_confidence, - CancellationToken token) - { - var backendResponse = await _dispatcher.DetectObjects(image, min_confidence, token); - if (backendResponse is BackendObjectDetectionResponse detectResponse) - { - var response = new DetectObjectsResponse - { - predictions = detectResponse?.predictions - ?.OrderBy(prediction => prediction.confidence) - ?.Select(prediction => new DetectedObject - { - label = prediction.label, - confidence = prediction.confidence, - x_min = (int)(prediction.x_min), - x_max = (int)(prediction.x_max), - y_min = (int)(prediction.y_min), - y_max = (int)(prediction.y_max), - - }) - ?.ToArray() - }; - - return response; - } - - return HandleErrorResponse(backendResponse); - } - - /// - /// Detect Faces in an image. - /// - /// The Form file object. - /// The minimum confidence level. Defaults to 0.4. - /// The injected request aborted cancellation token. - /// A list of face positions, and confidence levels. - /// Returns the list of detected face information, if any. - /// If the image in the Form data is null. - [HttpPost("face", Name = "DetectFaces")] - [Consumes("multipart/form-data")] - [Produces("application/json")] - [ProducesResponseType(StatusCodes.Status200OK)] - [ProducesResponseType(StatusCodes.Status400BadRequest)] - public async Task DetectFaces([FromForm] IFormFile image, - [FromForm] float? min_confidence, - CancellationToken token) - { - var backendResponse = await _dispatcher.DetectFaces(image, min_confidence, token); - if (backendResponse is BackendFaceDetectionResponse detectResponse) - { - var response = new DetectFacesResponse - { - predictions = detectResponse?.predictions - ?.OrderBy(prediction => prediction.confidence) - ?.Select(prediction => new DetectedFace - { - confidence = prediction.confidence, - x_min = (int)(prediction.x_min), - x_max = (int)(prediction.x_max), - y_min = (int)(prediction.y_min), - y_max = (int)(prediction.y_max), - - }) - ?.ToArray() - }; - - return response; - } - - return HandleErrorResponse(backendResponse); - } - - /// - /// Match Faces in two different images. - /// - /// The Form file object. - /// The Form file object. - /// The injected request aborted cancellation token. - /// A list of object names, positions, and confidence levels. - /// Similarity of the two faces. - /// If the image in the Form data is null. - [HttpPost("face/match", Name = "MatchFaces")] - [Consumes("multipart/form-data")] - [Produces("application/json")] - [ProducesResponseType(StatusCodes.Status200OK)] - [ProducesResponseType(StatusCodes.Status400BadRequest)] - public async Task MatchFaces(IFormFile image1, IFormFile image2, - CancellationToken token) - { - var backendResponse = await _dispatcher.MatchFaces(image1, image2, token); - if (backendResponse is BackendFaceMatchResponse matchResponse) - { - var response = new MatchFacesResponse - { - similarity = matchResponse.similarity - }; - - return response; - } - - return HandleErrorResponse(backendResponse); - } - - /// - /// Register Face for Recognition. - /// - /// The if of the user for whom to register the images. - /// The injected request aborted cancellation token. - /// A list of object names, positions, and confidence levels. - /// Success message. 
- /// If the image in the Form data is null. - /// This method should be a PUT, not a POST operation. We've left it as POST in - /// order to maintain compatibility with the original DeepStack code. - [HttpPost("face/register", Name = "RegisterFace")] - [Consumes("multipart/form-data")] - [Produces("application/json")] - [ProducesResponseType(StatusCodes.Status200OK)] - [ProducesResponseType(StatusCodes.Status400BadRequest)] - public async Task RegisterFace([FromForm] string userid, - CancellationToken token) - { - var formFiles = HttpContext.Request.Form.Files; - - var backendResponse = await _dispatcher.RegisterFaces(userid, formFiles, token); - if (backendResponse is BackendFaceRegisterResponse detectResponse) - { - var response = new RegisterFaceResponse - { - success = true, - message = detectResponse.message - }; - - return response; - } - - return HandleErrorResponse(backendResponse); - } - - /// - /// Recognize Faces in image. - /// - /// The image file. - /// The minimum confidence for recognition. - /// The injected request aborted cancellation token. - /// A list of object names, positions, and confidence levels. - /// Array of predictions. - /// If the image in the Form data is null. - [HttpPost("face/recognize", Name = "RecognizeFaces")] - [Consumes("multipart/form-data")] - [Produces("application/json")] - [ProducesResponseType(StatusCodes.Status200OK)] - [ProducesResponseType(StatusCodes.Status400BadRequest)] - public async Task RecognizeFaces([FromForm] IFormFile image, - [FromForm] float? min_confidence, - CancellationToken token) - { - var backendResponse = await _dispatcher.RecognizeFaces(image, min_confidence, token); - if (backendResponse is BackendFaceRecognitionResponse detectResponse) - { - var response = new RecognizeFacesResponse - { - predictions = detectResponse?.predictions - ?.OrderBy(prediction => prediction.confidence) - ?.Select(prediction => new RecognizedFace - { - confidence = prediction.confidence, - userid = prediction.userid, - x_min = (int)(prediction.x_min), - x_max = (int)(prediction.x_max), - y_min = (int)(prediction.y_min), - y_max = (int)(prediction.y_max), - - }) - ?.ToArray() - }; - - return response; - } - - return HandleErrorResponse(backendResponse); - } - - /// - /// List Faces registered for recognition. - /// - /// The injected request aborted cancellation token. - /// A list of object names, positions, and confidence levels. - /// Array of predictions. - /// If the image in the Form data is null. - /// This method should be a GET, not a POST operation. We've left it as POST in - /// order to maintain compatibility with the original DeepStack code. - [HttpPost("face/list", Name = "ListFaces")] - [Consumes("multipart/form-data")] - [Produces("application/json")] - [ProducesResponseType(StatusCodes.Status200OK)] - [ProducesResponseType(StatusCodes.Status400BadRequest)] - public async Task ListRegisteredFaces(CancellationToken token) - { - var backendResponse = await _dispatcher.ListFaces(token); - if (backendResponse is BackendListRegisteredFacesResponse listResponse) - { - var response = new ListRegisteredFacesResponse - { - faces = listResponse?.faces - }; - - return response; - } - - return HandleErrorResponse(backendResponse); - } - - /// - /// Delete a registered face. - /// - /// The ID of the user whose face info should be deleted - /// The injected request aborted cancellation token. - /// Success indication. - /// Array of predictions. - /// If the image in the Form data is null. 
- /// This method should be a DELETE, not a POST operation. We've left it as POST in - /// order to maintain compatibility with the original DeepStack code. - [HttpPost("face/delete", Name = "DeleteFaces")] - [Consumes("multipart/form-data")] - [Produces("application/json")] - [ProducesResponseType(StatusCodes.Status200OK)] - [ProducesResponseType(StatusCodes.Status400BadRequest)] - public async Task DeleteRegisteredFaces([FromForm] string userid, - CancellationToken token) - { - var backendResponse = await _dispatcher.DeleteFaces(userid, token); - if (backendResponse is BackendFaceDeleteResponse deleteResponse) - { - var response = new DeleteFaceResponse - { - success = deleteResponse.success - }; - - return response; - } - - return HandleErrorResponse(backendResponse); - } - } -} diff --git a/src/API/Server/FrontEnd/Dockerfile b/src/API/Server/FrontEnd/Dockerfile index 92194255..a9f0bfb6 100644 --- a/src/API/Server/FrontEnd/Dockerfile +++ b/src/API/Server/FrontEnd/Dockerfile @@ -11,32 +11,42 @@ WORKDIR /src COPY ["src/API/Server/FrontEnd/Frontend.csproj", "src/API/Server/FrontEnd/"] COPY ["src/API/Server/Backend/Backend.csproj", "src/API/Server/Backend/"] COPY ["src/API/Common/Common.csproj", "src/API/Common/"] -COPY ["src/AnalysisLayer/CodeProject.SenseAI.AnalysisLayer.Yolo/CodeProject.SenseAI.AnalysisLayer.Yolo.csproj", "src/AnalysisLayer/CodeProject.SenseAI.AnalysisLayer.Yolo/"] +COPY ["src/AnalysisLayer/CodeProject.SenseAI.AnalysisLayer.Yolo/ObjectDetector.csproj", "src/AnalysisLayer/CodeProject.SenseAI.AnalysisLayer.Yolo/"] +COPY ["src/AnalysisLayer/PortraitFilter/PortraitFilter.csproj", "src/AnalysisLayer/PortraitFilter/"] COPY ["src/AnalysisLayer/SDK/ModelRunners/Yolov5Net.Scorer/Yolov5Net.Scorer.csproj", "src/AnalysisLayer/SDK/ModelRunners/Yolov5Net.Scorer/"] +Copy ["src/AnalysisLayer/SDK/NET/CodeProject.SenseAI.AnalysisLayer.SDK/CodeProject.SenseAI.AnalysisLayer.SDK.csproj", "src/AnalysisLayer/SDK/NET/CodeProject.SenseAI.AnalysisLayer.SDK/" ] + RUN dotnet restore "src/API/Server/FrontEnd/Frontend.csproj" COPY . . WORKDIR "/src/src/API/Server/FrontEnd" RUN dotnet build "Frontend.csproj" -c Release -o /app/build/server WORKDIR "/src/src/AnalysisLayer/CodeProject.SenseAI.AnalysisLayer.Yolo" -RUN dotnet build "CodeProject.SenseAI.AnalysisLayer.Yolo.csproj" -c Release -o /app/build/AnalysisLayer/CodeProject.SenseAI.AnalysisLayer.Yolo +RUN dotnet build "ObjectDetector.csproj" -c Release -o /app/build/AnalysisLayer/CodeProject.SenseAI.AnalysisLayer.Yolo + +WORKDIR "/src/src/AnalysisLayer/PortraitFilter" +RUN dotnet build "PortraitFilter.csproj" -c Release -o /app/build/AnalysisLayer/PortraitFilter FROM build AS publish WORKDIR "/src/src/API/Server/FrontEnd" RUN dotnet publish "Frontend.csproj" -c Release -o /app/publish/server WORKDIR "/src/src/AnalysisLayer/CodeProject.SenseAI.AnalysisLayer.Yolo" -RUN dotnet publish "CodeProject.SenseAI.AnalysisLayer.Yolo.csproj" -c Release -o /app/publish/AnalysisLayer/CodeProject.SenseAI.AnalysisLayer.Yolo +RUN dotnet publish "ObjectDetector.csproj" -c Release -o /app/publish/AnalysisLayer/CodeProject.SenseAI.AnalysisLayer.Yolo + +WORKDIR "/src/src/AnalysisLayer/PortraitFilter" +RUN dotnet publish "PortraitFilter.csproj" -c Release -o /app/publish/AnalysisLayer/PortraitFilter # zipping up the test images and moving to the server wwwroot directory so it can be downloaded from server. -#RUN apt-get update -y && apt-get upgrade -y -#RUN apt-get install -y zip -#WORKDIR "/src/demos/TestData" -#RUN zip -r testdata.zip . 
-#RUN mv testdata.zip /app/publish/server/wwwroot +RUN apt-get update -y && apt-get upgrade -y +RUN apt-get install -y zip +WORKDIR "/src/demos/TestData" +RUN zip -r testdata.zip . +RUN mv testdata.zip /app/publish/server/wwwroot # WORKDIR /src +COPY ["src/AnalysisLayer/BackgroundRemover/", "/app/publish/AnalysisLayer/BackgroundRemover"] COPY ["src/AnalysisLayer/TextSummary/", "/app/publish/AnalysisLayer/TextSummary"] -COPY ["src/AnalysisLayer/DeepStack/intelligencelayer", "/app/publish/AnalysisLayer/DeepStack/intelligencelayer"] -COPY ["src/AnalysisLayer/DeepStack/assets", "/app/publish/AnalysisLayer/DeepStack/assets"] +COPY ["src/AnalysisLayer/DeepStack/", "/app/publish/AnalysisLayer/DeepStack"] +COPY ["src/AnalysisLayer/SDK/Python", "/app/publish/AnalysisLayer/SDK/Python"] COPY ["demos/TestData", "/app/publish/demos/TestData"] FROM base AS final @@ -56,8 +66,15 @@ ENV LOGGING__CONSOLE__FORMATTERNAME=simple WORKDIR /app COPY --from=publish /app/publish . -RUN pip3 install -r /app/AnalysisLayer/TextSummary/requirements.txt -RUN pip3 install -r /app/AnalysisLayer/DeepStack/intelligencelayer/requirements.txt +RUN add-apt-repository ppa:deadsnakes/ppa -y +RUN apt update -y +RUN apt-get install python3.9 -y +RUN python3.8 -m pip install --upgrade pip +RUN python3.9 -m pip install --upgrade pip + +RUN python3.8 -m pip install -r /app/AnalysisLayer/TextSummary/requirements.txt +RUN python3.8 -m pip install -r /app/AnalysisLayer/DeepStack/intelligencelayer/requirements.txt +RUN python3.9 -m pip install -r /app/AnalysisLayer/BackgroundRemover/requirements.txt WORKDIR /app/server ENTRYPOINT ["dotnet", "CodeProject.SenseAI.Server.dll"] \ No newline at end of file diff --git a/src/API/Server/FrontEnd/Frontend.csproj b/src/API/Server/FrontEnd/Frontend.csproj index 0a290b89..eba62cf3 100644 --- a/src/API/Server/FrontEnd/Frontend.csproj +++ b/src/API/Server/FrontEnd/Frontend.csproj @@ -1,160 +1,114 @@  - - net6.0 - disable - enable - CodeProject.SenseAI.Server - 1.2.1.0 - CodeProject.SenseAI.API.Server.Frontend - 14515168-17dd-49db-9023-0749bb408a37 - Linux - True - ..\..\..\.. - codeproject/senseai-server - favicon.ico - false - true - CodeProject - CodeProject - CodeProject SenseAI Server - + + net6.0 + disable + enable + CodeProject.SenseAI.Server + 1.3.0.0 + CodeProject.SenseAI.API.Server.Frontend + 14515168-17dd-49db-9023-0749bb408a37 + Linux + True + ..\..\..\.. + codeproject/senseai-server + favicon.ico + false + true + CodeProject + CodeProject + CodeProject SenseAI Server + - - true - true - true - SSPL-1.0 - A Service hosting the CodeProject SenseAI WebAPI for face detection and recognition, object detection, and scene classification, and other AI operations. - - - Windows - - - Linux - - - OSX - + + true + true + true + SSPL-1.0 + A Service hosting the CodeProject SenseAI WebAPI for face detection and recognition, object detection, and scene classification, and other AI operations. 
+ + + Windows + + + Linux + + + macOS + - - true - - none - false - + + true + + none + false + - - true - - + + true + + - - - - + + + + - - - + + + - - - - - - + + + + + + - - - - + + + - - - Always - - - Always - - - Always - - - Always - - - Always - - - Always - - - Always - - - Always - - - Always - - - Always - - - Always - - - PreserveNewest - - - Always - - + + + Always + + + Always + + + Always + + + Always + + + Always + + + Always + + + Always + + + Always + + + Always + + + Always + + + Always + + + PreserveNewest + + + Always + + - - - - - + \ No newline at end of file diff --git a/src/API/Server/FrontEnd/FrontendOptions.cs b/src/API/Server/FrontEnd/FrontendOptions.cs index 2c6da2f6..14d3c5c4 100644 --- a/src/API/Server/FrontEnd/FrontendOptions.cs +++ b/src/API/Server/FrontEnd/FrontendOptions.cs @@ -2,6 +2,8 @@ using System; using System.Collections.Generic; +using CodeProject.SenseAI.Server.Backend; + namespace CodeProject.SenseAI.API.Server.Frontend { /// @@ -41,110 +43,14 @@ public class FrontendOptions public string? PYTHON_BASEPATH { get; set; } /// - /// Gets or sets the path to the Python 3.7 interpreter - /// - public string? PYTHON37_PATH { get; set; } - - /// - /// Gets or sets the information to start all the backend processes. - /// - public StartupProcess[]? StartupProcesses { get; set; } - - /// - /// Gets or sets the information to pass to the backend processes. - /// - public Dictionary? BackendEnvironmentVariables { get; set; } - } - - /// - /// Data mapping a path to a queue and command. - /// - public struct ApiRouteMap - { - /// - /// Gets or sets the path for the route. - /// - public string Path { get; set; } - - /// - /// Gets or sets the name of the Queue used by this path. - /// - public string Queue { get; set; } - - /// - /// Gets the command passed as part of the payload to the - /// queue for this path. - /// - public string Command { get; set; } - - /// - /// Initializes a new instance of the ApiRoute struct. - /// - /// - /// - /// - public ApiRouteMap(string Path, string Queue, string Command) - { - this.Path = Path; - this.Queue = Queue; - this.Command = Command; - } - } - - /// - /// Information required to start the backend processes. - /// - public class StartupProcess - { - /// - /// Gets or sets the Name to be displayed. - /// - public string? Name { get; set; } - - /// - /// Gets or sets whether this process is currently running. - /// - public bool? Running { get; set; } = false; - - /// - /// Gets or sets the name of the configuration value which enables this process. - /// - public string[] EnableFlags { get; set; } = Array.Empty(); - - /// - /// Gets or sets a value indicating whether this procoess should be activated on startup if - /// no instruction to the contrary is seen. A default "Start me up" flag. - /// - public bool? Activate { get; set; } - - /// - /// Gets or sets the name of the Queue used by this process. - /// - public string[] Queues { get; set; } = Array.Empty(); - - /// - /// Gets or sets a list of RouteMaps. - /// - public ApiRouteMap[] RouteMaps { get; set; } = Array.Empty(); - - /// - /// Gets or sets the name of the command to be executed. - /// - public string? Command { get; set; } - - /// - /// Gets or set the working directory for this command - /// - public string? WorkingDirectory { get; set; } - - /// - /// Gets or sets the arguments passed to the command. + /// Gets or sets the tamplated path to the Python interpreter. This path + /// may include a %PYTHON_RUNTIME% marker which will need to be replaced. 
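/// For example (illustrative): a module whose Runtime is "python39" gets this path with
/// %PYTHON_RUNTIME% replaced by "python39" ("python3.9" when running under Docker), a
/// Runtime of "python" maps to the default "python37", and "dotnet" modules are launched
/// with the "dotnet" command directly. Modules that specify neither Runtime nor Command
/// fall back to their FilePath extension: ".py" uses the default Python, ".dll" uses dotnet.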
/// - public string? Args { get; set; } + public string? PYTHON_PATH { get; set; } /// - /// Gets or sets the platforms on which this module is supported. + /// Gets or sets the environment variables, common to the SenseAI Server ecosystem, to set. /// - public string[] Platforms { get; set; } = Array.Empty(); + public Dictionary? EnvironmentVariables { get; set; } } } diff --git a/src/API/Server/FrontEnd/ModuleCollection.cs b/src/API/Server/FrontEnd/ModuleCollection.cs new file mode 100644 index 00000000..540cca31 --- /dev/null +++ b/src/API/Server/FrontEnd/ModuleCollection.cs @@ -0,0 +1,103 @@ +using CodeProject.SenseAI.Server.Backend; + +using System; +using System.Collections.Generic; + +namespace CodeProject.SenseAI.API.Server.Frontend +{ + /// + /// The set of modules for backend processing. + /// + public class ModuleCollection : Dictionary + { + } + + /// + /// Information required to start the backend processes. + /// + public class ModuleConfig + { + /// + /// Gets or sets a value indicating whether this procoess should be activated on startup if + /// no instruction to the contrary is seen. A default "Start me up" flag. + /// + public bool? Activate { get; set; } + + /// + /// Gets or sets the Name to be displayed. + /// + public string? Name { get; set; } + + /// + /// Gets or sets the runtime used to execute the file at FilePath. For example, the runtime + /// could be "dotnet" or "python39". + /// + public string? Runtime { get; set; } + + /// + /// Gets or sets the command to execute the file at FilePath. If set, this overrides Runtime. + /// An example would be "/usr/bin/python3". This property allows you to specify an explicit + /// command in case the necessary runtime hasn't been registered, or in case you need to + /// provide specific flags or naming alternative when executing the FilePath on different + /// platforms. + /// + public string? Command { get; set; } + + /// + /// Gets or sets the path to the startup file relative to the module directory. + /// + /// + /// If no Runtime or Command is specified then a default runtime will be chosen based on + /// the extension. Currently this is: + /// .py => it will be started with the default Python interpreter + /// .dll => it will be started with the .NET runtime. + /// + public string? FilePath { get; set; } + + /// + /// Gets or sets the time this module was started. + /// + public DateTime? Started { get; set; } = null; + + /// + /// Gets or sets the latest time a request from this module was spotted by the queue manager. + /// + public DateTime? LastSeen { get; set; } = null; + + /// + /// Gets a value indicating whether this process is currently active + /// + public bool Running + { + get + { + return LastSeen != null && (DateTime.UtcNow - LastSeen!) < TimeSpan.FromSeconds(65); + } + } + + /// + /// Gets or sets the number of requests processed + /// + public int? Processed { get; set; } = 0; + + /// + /// Gets or sets the name of the configuration value which enables this process. + /// + public string[] EnableFlags { get; set; } = Array.Empty(); + + /// + /// Gets or sets the information to pass to the backend processes. + /// + public Dictionary? EnvironmentVariables { get; set; } + + /// + /// Gets or sets a list of RouteMaps. + /// + public BackendRouteInfo[] RouteMaps { get; set; } = Array.Empty(); + + /// + /// Gets or sets the platforms on which this module is supported. 
+ /// + public string[] Platforms { get; set; } = Array.Empty(); + } +} diff --git a/src/API/Server/FrontEnd/Program.cs b/src/API/Server/FrontEnd/Program.cs index dae9de2a..7ac0e865 100644 --- a/src/API/Server/FrontEnd/Program.cs +++ b/src/API/Server/FrontEnd/Program.cs @@ -22,7 +22,7 @@ namespace CodeProject.SenseAI.API.Server.Frontend public class Program { static int _port = 5000; - // static int _sPort = 5001; + // static int _sPort = 5001; - eventually for SSL /// /// The Application Entry Point. @@ -55,26 +55,25 @@ public static async Task Main(string[] args) } } - string platform = "windows"; - if (RuntimeInformation.IsOSPlatform(OSPlatform.OSX)) - platform = "osx"; - else if (RuntimeInformation.IsOSPlatform(OSPlatform.Linux)) - platform = "linux"; + // lower cased as Linux has case senitive file names + string platform = BackendProcessRunner.Platform.ToLower(); + string? aspNetEnv = Environment.GetEnvironmentVariable("ASPNETCORE_ENVIRONMENT") + ?.ToLower(); + // Get a directory for the given platform that allows momdules to store persisted data string programDataDir = Environment.GetFolderPath(Environment.SpecialFolder.CommonApplicationData); string applicationDataDir = $"{programDataDir}\\{company}\\{product}".Replace('\\', Path.DirectorySeparatorChar); - if (platform == "osx") + + // .NET's suggestion for macOS isn't great. Let's do something different. + if (platform == "macos") applicationDataDir = $"~/Library/Application Support/{company}/{product}"; + // Store this dir in the config settings so we can get to it later. var inMemoryConfigData = new Dictionary { { "ApplicationDataDir", applicationDataDir } }; bool inVScode = (Environment.GetEnvironmentVariable("RUNNING_IN_VSCODE") ?? "") == "true"; - bool inDocker = (Environment.GetEnvironmentVariable("DOTNET_RUNNING_IN_CONTAINER") ?? "") == "true"; - - if (inDocker) - platform = "docker"; // which in our case implies that we are running in Linux IHost? host = CreateHostBuilder(args) .ConfigureAppConfiguration((hostingContext, config) => @@ -86,6 +85,9 @@ public static async Task Main(string[] args) { config.AddJsonFile(Path.Combine(baseDir, "appsettings.json"), optional: false, reloadOnChange: true); + + config.AddJsonFile(Path.Combine(baseDir, $"appsettings.{aspNetEnv}.json"), + optional: true, reloadOnChange: true); } config.AddJsonFile(Path.Combine(baseDir, $"appsettings.{platform}.json"), @@ -93,16 +95,8 @@ public static async Task Main(string[] args) // ListEnvVariables(Environment.GetEnvironmentVariables()); - string? aspNetEnv = Environment.GetEnvironmentVariable("ASPNETCORE_ENVIRONMENT"); if (!string.IsNullOrWhiteSpace(aspNetEnv)) { - // We've had issues where the default appsettings files not being loaded. - if (inVScode && platform != "windows") - { - config.AddJsonFile(Path.Combine(baseDir, $"appsettings.{aspNetEnv}.json"), - optional: true, reloadOnChange: true); - } - config.AddJsonFile(Path.Combine(baseDir, $"appsettings.{platform}.{aspNetEnv}.json"), optional: true, reloadOnChange: true); } @@ -114,6 +108,7 @@ public static async Task Main(string[] args) reloadOnChange: true, optional: true); // ListConfigSources(config.Sources); + LoadModulesConfiguration(config, aspNetEnv); }) .Build() ; @@ -147,6 +142,67 @@ public static async Task Main(string[] args) } } + // TODO: This does not belong here and dhould be moved in to a Modules class. + // Loading of the module settings should not be done as part of the startup as this means + // modulesettings files can abort the Server startup. 
+ // We could: + // - create a separate ConfigurationBuilder + // - clear the configuration sources + // - add the modulesettings files as we do now + // - build a configuration from this builder + // - use this configuration to load the module settings + // The module class will have methods and properties to get the ModuleConfigs, and other + // things. To be done at a later date. + private static void LoadModulesConfiguration(IConfigurationBuilder config, string? aspNetEnv) + { + IConfiguration configuration = config.Build(); + var options = configuration.GetSection("FrontEndOptions"); + string? rootPath = options["ROOT_PATH"]; + string? modulesPath = options["MODULES_PATH"]; + + // Get the Modules Path + rootPath = Path.Combine(AppContext.BaseDirectory, rootPath); + rootPath = rootPath.Replace('\\', Path.DirectorySeparatorChar); + rootPath = Path.GetFullPath(rootPath); + + if (string.IsNullOrWhiteSpace(rootPath)) + return; + + modulesPath = modulesPath.Replace("%ROOT_PATH%", rootPath); + modulesPath = modulesPath.Replace('\\', Path.DirectorySeparatorChar); + modulesPath = Path.GetFullPath(modulesPath); + + if (string.IsNullOrWhiteSpace(modulesPath)) + return; + + string platform = BackendProcessRunner.Platform.ToLower(); + aspNetEnv = aspNetEnv?.ToLower(); + + // Get the Modules Directories + // Be careful of the order. + var directories = Directory.GetDirectories(modulesPath); + foreach (string? directory in directories) + { + config.AddJsonFile(Path.Combine(directory, "modulesettings.json"), + optional: true, reloadOnChange: true); + + if (!string.IsNullOrEmpty(aspNetEnv)) + { + config.AddJsonFile(Path.Combine(directory, $"modulesettings.{aspNetEnv}.json"), + optional: true, reloadOnChange: true); + } + + config.AddJsonFile(Path.Combine(directory, $"modulesettings.{platform}.json"), + optional: true, reloadOnChange: true); + + if (!string.IsNullOrEmpty(aspNetEnv)) + { + config.AddJsonFile(Path.Combine(directory, $"modulesettings.{platform}.{aspNetEnv}.json"), + optional: true, reloadOnChange: true); + } + } + } + /// /// Creates the Host Builder for the application /// @@ -193,10 +249,11 @@ public static IHostBuilder CreateHostBuilder(string[] args) private static int GetServerPort(WebHostBuilderContext hostbuilderContext) { IConfiguration config = hostbuilderContext.Configuration; - int port = config.GetValue("PORT", -1); + // REVIEW: [Matthew] These should both be PORT_CLIENT, not PORT. + int port = config.GetValue("PORT", -1); if (port < 0) - port = config.GetValue("FrontEndOptions:BackendEnvironmentVariables:PORT", -1); + port = config.GetValue("FrontEndOptions:EnvironmentVariables:PORT", -1); // TODO: PORT_CLIENT if (port < 0) { @@ -206,6 +263,7 @@ private static int GetServerPort(WebHostBuilderContext hostbuilderContext) if (!int.TryParse(urls.Split(':').Last().Trim('/'), out port)) port = _port; + // REVIEW: [Matthew] This should be PORT_CLIENT, not PORT. 
config["PORT"] = port.ToString(); } } diff --git a/src/API/Server/FrontEnd/Startup.cs b/src/API/Server/FrontEnd/Startup.cs index cd866d57..64b70fa3 100644 --- a/src/API/Server/FrontEnd/Startup.cs +++ b/src/API/Server/FrontEnd/Startup.cs @@ -93,7 +93,7 @@ public void ConfigureServices(IServiceCollection services) // ListConfigValues(); // Configure application services and DI - services.Configure(Configuration.GetSection(nameof(BackendOptions))) + services.Configure(Configuration.GetSection(nameof(QueueProcessingOptions))) .AddQueueProcessing(); // Moved into its own file diff --git a/src/API/Server/FrontEnd/appsettings.Development.json b/src/API/Server/FrontEnd/appsettings.Development.json index 1549cab3..a711db95 100644 --- a/src/API/Server/FrontEnd/appsettings.Development.json +++ b/src/API/Server/FrontEnd/appsettings.Development.json @@ -1,42 +1,18 @@ { - "UpdateCheckUrl": "https://www.codeproject.com/ai/sense/version.aspx", - "UpdateDownloadUrl": "https://www.codeproject.com/ai/sense/latest.aspx", - //"LaunchAnalysisServices": false, + "UpdateCheckUrl": "https://www.codeproject.com/ai/sense/version.aspx", + "UpdateDownloadUrl": "https://www.codeproject.com/ai/sense/latest.aspx", - "FrontEndOptions": { + //"LaunchAnalysisServices": false, - // The root of the application, relative to the current application working directory. In production, - // the SenseAI server sits in the /Server dir, so we need to go up one level to get to the root path - "ROOT_PATH": "..\\..\\..\\..\\..\\..\\..", - "MODULES_PATH": "%ROOT_PATH%\\src\\AnalysisLayer", - "PYTHON_BASEPATH": "%MODULES_PATH%\\bin\\%PLATFORM%\\Python37\\venv", - "PYTHON37_PATH": "%PYTHON_BASEPATH%\\scripts\\Python", // overridden in platform specific json + "FrontEndOptions": { - // Values specific to the analysis modules - "BackendEnvironmentVariables": { + "ROOT_PATH": "..\\..\\..\\..\\..\\..\\..", + "MODULES_PATH": "%ROOT_PATH%\\src\\AnalysisLayer", + "PYTHON_BASEPATH": "%MODULES_PATH%\\bin\\%PLATFORM%\\%PYTHON_RUNTIME%\\venv", - // Shared values - "ERRLOG_APIKEY": "", // Go to errlog.io and grab yourself an ID - - // For Legacy Modules - "VISION-FACE": true, - "VISION-DETECTION": false, - "VISION-SCENE": true, - - // For the .NET YOLO Module... - "CPSENSEAI-YOLO": true - }, - - // The processes (typically the backend analysis processes) that are to be started when the - // server starts. They will be started in order of appearance. For "Command" we currently - // provide the %PYTHON37_PATH% macro that will point to an instance of the Python 3.7 - // interpreter if needed. 
- "StartupProcesses": { - - "ObjectDetection": { - "Args": "%MODULES_PATH%\\CodeProject.SenseAI.AnalysisLayer.Yolo\\bin\\debug\\net6.0\\CodeProject.SenseAI.AnalysisLayer.Yolo.dll", - "WorkingDirectory": "%MODULES_PATH%\\CodeProject.SenseAI.AnalysisLayer.Yolo\\bin\\debug\\net6.0" - } + // Values provided to all analysis modules, as well as the front-end server + "EnvironmentVariables": { + "ERRLOG_APIKEY": "" // Go to errlog.io and grab yourself an ID + } } - } } diff --git a/src/API/Server/FrontEnd/appsettings.docker.development.json b/src/API/Server/FrontEnd/appsettings.docker.development.json index 2d3b2d33..9041ca5b 100644 --- a/src/API/Server/FrontEnd/appsettings.docker.development.json +++ b/src/API/Server/FrontEnd/appsettings.docker.development.json @@ -1,22 +1,7 @@ { "FrontEndOptions": { - - "ROOT_PATH": "../../../../../../..", - - "MODULES_PATH": "%ROOT_PATH%/src/AnalysisLayer", - "PYTHON_BASEPATH": "%MODULES_PATH%/bin/%PLATFORM%/python37/venv", // in Development, Python goes under separate OS foilders - "PYTHON37_PATH": "%PYTHON_BASEPATH%/bin/python3.7", - - // Values specific to the analysis modules - "BackendEnvironmentVariables": { - - "APPDIR": "%MODULES_PATH%/DeepStack/intelligencelayer", - "DATA_DIR": "%MODULES_PATH%/DeepStack/datastore", - "TEMP_PATH": "%MODULES_PATH%/DeepStack/tempstore", - "MODELS_DIR": "%MODULES_PATH%/DeepStack/assets", - "PROFILE": "desktop_cpu", - "CUDA_MODE": "False", - "MODE": "MEDIUM" - } - } + "ROOT_PATH": "../../../../../../..", + "PYTHON_BASEPATH": "%MODULES_PATH%/bin/%PLATFORM%/%PYTHON_RUNTIME%/venv", // in Development, Python goes under separate OS folders + "PYTHON_PATH": "%PYTHON_BASEPATH%/bin/python3.7" + } } \ No newline at end of file diff --git a/src/API/Server/FrontEnd/appsettings.docker.json b/src/API/Server/FrontEnd/appsettings.docker.json index a2e6f0ef..48924b56 100644 --- a/src/API/Server/FrontEnd/appsettings.docker.json +++ b/src/API/Server/FrontEnd/appsettings.docker.json @@ -1,38 +1,6 @@ { - "FrontEndOptions": { - - "ROOT_PATH": "/app", // The current working dir. Overridden in platform specific jsons - "PYTHON37_PATH": "python3", - - // Values specific to the analysis modules - "BackendEnvironmentVariables": { - "NLTK_DATA": "%MODULES_PATH%/textSummary/nltk_data" - }, - - // Need to explicitly enter the command as the BackendProccessRunner tries to expand - // PYTHON37_PATH to a full path and mistakes "python3" as a relative directory. - "StartupProcesses": { - - "TextSummary": { - "Command": "python3" - }, - - "FaceProcessing": { - "Command": "python3" - }, - - "SceneClassification": { - "Command": "python3" - }, - - "ObjectDetection": { - "Args": "%MODULES_PATH%/CodeProject.SenseAI.AnalysisLayer.Yolo/CodeProject.SenseAI.AnalysisLayer.Yolo.dll", - "WorkingDirectory": "%MODULES_PATH%/CodeProject.SenseAI.AnalysisLayer.Yolo" - }, - - "LegacyObjectDetection": { - "Command": "python3" - } - } - } + "FrontEndOptions": { + "ROOT_PATH": "/app", + "PYTHON_PATH": "%PYTHON_RUNTIME%" + } } \ No newline at end of file diff --git a/src/API/Server/FrontEnd/appsettings.json b/src/API/Server/FrontEnd/appsettings.json index 36d7e737..c8d6a942 100644 --- a/src/API/Server/FrontEnd/appsettings.json +++ b/src/API/Server/FrontEnd/appsettings.json @@ -23,15 +23,22 @@ // for debugging the modules separately) "LaunchAnalysisServices": true, - // Request queue settings - "BackendOptions": { + // Request queue settings. 
+    // TODO: Rename to ModuleQueue
+    "QueueProcessingOptions": {
        "ResponseTimeout": "00:00:30",
        "CommandDequeueTimeout": "00:00:10",
        "MaxQueueLength": 32
    },

+    // TODO: Rename to "Server"
    "FrontEndOptions": {

+        // TODO: Add PORT_CLIENT here, with PORT_CLIENT signifying this is the port that a client
+        // of the server should use, as opposed to PORT_MODULE which is the port a module
+        // should use for communicating with the server
+        // PORT_CLIENT: 5000, // Port for frontend clients using the API
+
        // The root of the application, relative to the current application working directory. In
        // production, the SenseAI server sits in the /Server dir, so we need to go up one level to
        // get to the root path
@@ -40,167 +47,31 @@
        // The location of the AI modules
        "MODULES_PATH": "%ROOT_PATH%\\AnalysisLayer",

-        // The location of "the" Python interpreter. In Development mode, the Python folder will go
-        // under an OS specific folder. This allows the same repo to run under Windows and Linux.
-        // We say "the" python interpreter because future versions may support multiple Python
-        // versions
-        "PYTHON_BASEPATH": "%MODULES_PATH%\\bin\\Python37\\venv",
-        "PYTHON37_PATH": "%PYTHON_BASEPATH%\\scripts\\Python",
+        // The template location of the Python interpreter for use when the Runtime for a module is
+        // set as "Python". In Development mode, the Python folder will go under an OS specific
+        // folder. This allows the same Python modules to run under Windows and Linux.
+        // %PYTHON_RUNTIME% will be replaced by the ModuleConfig's Runtime value if the Runtime
+        // begins with "Python". Ultimately, Runtimes and their launchers' paths will be registered
+        // by "runtime" type modules, removing the need for this hardcoding.
+        "PYTHON_BASEPATH": "%MODULES_PATH%\\bin\\%PYTHON_RUNTIME%\\venv",
+        "PYTHON_PATH": "%PYTHON_BASEPATH%\\scripts\\Python",
+
+        // These are no longer used
        // "SERVEREXE_DIRNAME": "server", // dir name containing the API server exe for production.
-        // "APICODE_DIRNAME": "api", // dir name containing all the API server code.
+        // "APICODE_DIRNAME": "api" // dir name containing all the API server code.

        // These key/values are added to the set of environment variables when the backend
        // processes are launched.
-        "BackendEnvironmentVariables": {
+        "EnvironmentVariables": {

-            // Shared values
-            "ERRLOG_APIKEY": "ed359c3a-8a77-4f23-8db3-d3eb5fac23d9",
+            // TODO: Change PORT to PORT_SENSEAI to provide the backend analysis services the port
+            //       they will use when communicating with senseAI's queues. Individual modules are
+            //       welcome to add their own "PORT" var themselves in their modulesettings.json
+            //       file if they need it.
+            // PORT_SENSEAI: 5000, // Port for backend analysis servers providing services to the API
            "PORT": 5000,

-            "VIRTUAL_ENV": "%PYTHON_BASEPATH%",
-
-            // For Text
-            "NLTK_DATA": "%MODULES_PATH%\\TextSummary\\nltk_data",
-
-            // For Legacy Modules
-            "VISION-FACE": true,
-            "VISION-DETECTION": false,
-            "VISION-SCENE": true,
-
-            // For the .NET YOLO Module...
-            "CPSENSEAI-YOLO": true,
-
-            "APPDIR": "%MODULES_PATH%\\DeepStack\\intelligencelayer",
-            "DATA_DIR": "%MODULES_PATH%\\DeepStack\\datastore",
-            "TEMP_PATH": "%MODULES_PATH%\\DeepStack\\tempstore",
-            "MODELS_DIR": "%MODULES_PATH%\\DeepStack\\assets",
-            "PROFILE": "desktop_cpu",
-            "CUDA_MODE": "False",
-            "MODE": "MEDIUM"
-        },
-
-        // The processes (typically the backend analysis processes) that are to be started when the
-        // server starts. They will be started in order of appearance.
For "Command" we currently - // provide the %PYTHON37_PATH% macro that will point to an instance of the Python 3.7 - // interpreter in a virtual environment if needed. - "StartupProcesses": { - - "TextSummary": { - "Name": "Text Summary", - "Activate": true, - "EnableFlags": [ "TEXT-SUMMARY" ], - "Queues": [ "summary_queue" ], - "RouteMaps": [ - { - "Path": "text/summarize", - "Queue": "summary_queue", - "Command": "summarize" - } - ], - "Command": "%PYTHON37_PATH%", - "Args": "textsummary.py", - "WorkingDirectory": "%MODULES_PATH%\\TextSummary", - "Platforms": [ "windows", "linux", "osx" ] - }, - - "FaceProcessing": { - "Name": "Face Processing", - "Activate": true, - "EnableFlags": [ "VISION-FACE" ], - "Queues": [ "face_queue" ], - "RouteMaps": [ - { - "Path": "vision/face", - "Queue": "face_queue", - "Command": "detect" - }, - { - "Path": "vision/face/match", - "Queue": "face_queue", - "Command": "match" - }, - { - "Path": "vision/face/list", - "Queue": "face_queue", - "Command": "list" - }, - { - "Path": "vision/face/register", - "Queue": "face_queue", - "Command": "register" - }, - { - "Path": "vision/face/delete", - "Queue": "face_queue", - "Command": "delete" - }, - { - "Path": "vision/face/recognize", - "Queue": "face_queue", - "Command": "recognize" - } - ], - "Command": "%PYTHON37_PATH%", - "Args": "face.py", - "WorkingDirectory": "%MODULES_PATH%\\DeepStack\\intelligencelayer", - "Platforms": [ "windows", "linux", "osx" ] - }, - - "SceneClassification": { - "Name": "Scene Classification", - "Activate": true, - "EnableFlags": [ "VISION-SCENE" ], - "Queues": [ "scene_queue" ], - "RouteMaps": [ - { - "Path": "vision/scene", - "Queue": "scene_queue", - "Command": "detect" - } - ], - "Command": "%PYTHON37_PATH%", - "Args": "scene.py", - "WorkingDirectory": "%MODULES_PATH%\\DeepStack\\intelligencelayer", - "Platforms": [ "windows", "linux", "osx" ] - }, - - "ObjectDetection": { - "Name": "SenseAI Object Detection", - "Activate": true, - "EnableFlags": [ "CPSENSEAI-YOLO" ], - "Queues": [ "detection_queue" ], - "RouteMaps": [ - { - "Path": "vision/detection", - "Queue": "detection_queue", - "Command": "detect" - } - ], - "Command": "dotnet", - "Args": "\"%MODULES_PATH%\\CodeProject.SenseAI.AnalysisLayer.Yolo\\CodeProject.SenseAI.AnalysisLayer.Yolo.dll\"", - "WorkingDirectory": "%MODULES_PATH%\\CodeProject.SenseAI.AnalysisLayer.Yolo", - "Platforms": [ "windows", "linux", "osx" ] - }, - // We've replaced this with the SenseAI Object Detection (above). We're leaving it here - // so you can see the effect of disabling a module. 
- "LegacyObjectDetection": { - "Name": "Legacy Object Detection", - "Activate": false, - "EnableFlags": [ "VISION-DETECTION" ], - "Queues": [ "detection_queue" ], - "RouteMaps": [ - { - "Path": "vision/detection", - "Queue": "detection_queue", - "Command": "detect" - } - ], - "Command": "%PYTHON37_PATH%", - "Args": "detection.py", - "WorkingDirectory": "%MODULES_PATH%\\DeepStack\\intelligencelayer", - "Platforms": [ "linux", "osx", "windows" ] - } + "ERRLOG_APIKEY": "ed359c3a-8a77-4f23-8db3-d3eb5fac23d9" } } } diff --git a/src/API/Server/FrontEnd/appsettings.linux.development.json b/src/API/Server/FrontEnd/appsettings.linux.development.json index 07873a09..478e1358 100644 --- a/src/API/Server/FrontEnd/appsettings.linux.development.json +++ b/src/API/Server/FrontEnd/appsettings.linux.development.json @@ -1,18 +1,6 @@ { "FrontEndOptions": { - "ROOT_PATH": "../../../../../../..", - "API_DIRNAME": "api", // the name of the directory containing the API server - "MODULES_PATH": "%ROOT_PATH%/src/AnalysisLayer", - "PYTHON_BASEPATH": "%MODULES_PATH%/bin/%PLATFORM%/python37/venv", // in Development, Python goes under separate OS foilders - "PYTHON37_PATH": "%PYTHON_BASEPATH%/bin/python3", - - "StartupProcesses": { - - "ObjectDetection": { - "Args": "%MODULES_PATH%/CodeProject.SenseAI.AnalysisLayer.Yolo/bin/debug/net6.0/CodeProject.SenseAI.AnalysisLayer.Yolo.dll", - "WorkingDirectory": "%MODULES_PATH%/CodeProject.SenseAI.AnalysisLayer.Yolo/bin/debug/net6.0" - } - } - } + "PYTHON_BASEPATH": "%MODULES_PATH%/bin/%PLATFORM%/%PYTHON_RUNTIME%/venv", // in Development, Python goes under separate OS folders + } } \ No newline at end of file diff --git a/src/API/Server/FrontEnd/appsettings.linux.json b/src/API/Server/FrontEnd/appsettings.linux.json index f174dfbb..76b0e6b5 100644 --- a/src/API/Server/FrontEnd/appsettings.linux.json +++ b/src/API/Server/FrontEnd/appsettings.linux.json @@ -1,42 +1,6 @@ { "FrontEndOptions": { - - "PYTHON_BASEPATH": "%MODULES_PATH%/bin/Python37/venv", // in Development, Python goes under separate OS foilders - "PYTHON37_PATH": "%PYTHON_BASEPATH%/bin/python3", - - "BackendEnvironmentVariables": { - - "NLTK_DATA": "%MODULES_PATH%/TextSummary/nltk_data", - - "APPDIR": "%MODULES_PATH%/DeepStack/intelligencelayer", - "DATA_DIR": "/usr/share/CodeProject/SenseAI", // %MODULES_PATH%/DeepStack/datastore", - "TEMP_PATH": "%MODULES_PATH%/DeepStack/tempstore", - "MODELS_DIR": "%MODULES_PATH%/DeepStack/assets", - "PROFILE": "desktop_cpu", - "CUDA_MODE": "False", - "MODE": "MEDIUM" - }, - - "StartupProcesses": { - "TextSummary": { - "WorkingDirectory": "%MODULES_PATH%/TextSummary" - }, - - "FaceProcessing": { - "WorkingDirectory": "%MODULES_PATH%/DeepStack/intelligencelayer" - }, - - "SceneClassification": { - "WorkingDirectory": "%MODULES_PATH%/DeepStack/intelligencelayer" - }, - - "ObjectDetection": { - "WorkingDirectory": "%MODULES_PATH%/CodeProject.SenseAI.AnalysisLayer.Yolo" - }, - - "LegacyObjectDetection": { - "WorkingDirectory": "%MODULES_PATH%/DeepStack/intelligencelayer" - } - } + "PYTHON_BASEPATH": "%MODULES_PATH%/bin/%PYTHON_RUNTIME%/venv", // in Development, Python goes under separate OS folders + "PYTHON_PATH": "%PYTHON_BASEPATH%/bin/python3" } -} \ No newline at end of file +} diff --git a/src/API/Server/FrontEnd/appsettings.macos.development.json b/src/API/Server/FrontEnd/appsettings.macos.development.json new file mode 100644 index 00000000..478e1358 --- /dev/null +++ b/src/API/Server/FrontEnd/appsettings.macos.development.json @@ -0,0 +1,6 @@ +{ + "FrontEndOptions": { + 
"ROOT_PATH": "../../../../../../..", + "PYTHON_BASEPATH": "%MODULES_PATH%/bin/%PLATFORM%/%PYTHON_RUNTIME%/venv", // in Development, Python goes under separate OS folders + } +} \ No newline at end of file diff --git a/src/API/Server/FrontEnd/appsettings.macos.json b/src/API/Server/FrontEnd/appsettings.macos.json new file mode 100644 index 00000000..972e53ac --- /dev/null +++ b/src/API/Server/FrontEnd/appsettings.macos.json @@ -0,0 +1,10 @@ +{ + "FrontEndOptions": { + "PYTHON_BASEPATH": "%MODULES_PATH%/bin/%PYTHON_RUNTIME%/venv", // in Development, Python goes under separate OS folders + "PYTHON_PATH": "%PYTHON_BASEPATH%/bin/python3", + + "EnvironmentVariables": { + "PORT": 5500 + } + } +} \ No newline at end of file diff --git a/src/API/Server/FrontEnd/appsettings.osx.development.json b/src/API/Server/FrontEnd/appsettings.osx.development.json deleted file mode 100644 index b6bd15e6..00000000 --- a/src/API/Server/FrontEnd/appsettings.osx.development.json +++ /dev/null @@ -1,18 +0,0 @@ -{ - "FrontEndOptions": { - - "ROOT_PATH": "../../../../../../..", - "API_DIRNAME": "api", // the name of the directory containing the API server - "MODULES_PATH": "%ROOT_PATH%/src/AnalysisLayer", - "PYTHON_BASEPATH": "%MODULES_PATH%/bin/%PLATFORM%/python37/venv", // in Development, Python goes under separate OS foilders - "PYTHON37_PATH": "%PYTHON_BASEPATH%/bin/python3", - - "StartupProcesses": { - - "ObjectDetection": { - "Args": "%MODULES_PATH%/CodeProject.SenseAI.AnalysisLayer.Yolo/bin/debug/net6.0/CodeProject.SenseAI.AnalysisLayer.Yolo.dll", - "WorkingDirectory": "%MODULES_PATH%/CodeProject.SenseAI.AnalysisLayer.Yolo/bin/debug/net6.0" - } - } - } -} \ No newline at end of file diff --git a/src/API/Server/FrontEnd/appsettings.osx.json b/src/API/Server/FrontEnd/appsettings.osx.json deleted file mode 100644 index 6a88e6fd..00000000 --- a/src/API/Server/FrontEnd/appsettings.osx.json +++ /dev/null @@ -1,44 +0,0 @@ -{ - "FrontEndOptions": { - - "PYTHON_BASEPATH": "%MODULES_PATH%/bin/Python37/venv", // in Development, Python goes under separate OS foilders - "PYTHON37_PATH": "%PYTHON_BASEPATH%/bin/python3", - - "BackendEnvironmentVariables": { - - "PORT": 5500, - - "NLTK_DATA": "%MODULES_PATH%/TextSummary/nltk_data", - - "APPDIR": "%MODULES_PATH%/DeepStack/intelligencelayer", - "DATA_DIR": "/usr/share/CodeProject/SenseAI", // %MODULES_PATH%/DeepStack/datastore", - "TEMP_PATH": "%MODULES_PATH%/DeepStack/tempstore", - "MODELS_DIR": "%MODULES_PATH%/DeepStack/assets", - "PROFILE": "desktop_cpu", - "CUDA_MODE": "False", - "MODE": "MEDIUM" - }, - - "StartupProcesses": { - "TextSummary": { - "WorkingDirectory": "%MODULES_PATH%/TextSummary" - }, - - "FaceProcessing": { - "WorkingDirectory": "%MODULES_PATH%/DeepStack/intelligencelayer" - }, - - "SceneClassification": { - "WorkingDirectory": "%MODULES_PATH%/DeepStack/intelligencelayer" - }, - - "ObjectDetection": { - "WorkingDirectory": "%MODULES_PATH%/CodeProject.SenseAI.AnalysisLayer.Yolo" - }, - - "LegacyObjectDetection": { - "WorkingDirectory": "%MODULES_PATH%/DeepStack/intelligencelayer" - } - } - } -} \ No newline at end of file diff --git a/src/API/Server/FrontEnd/version.json b/src/API/Server/FrontEnd/version.json index 5d624dfc..bc0b08bc 100644 --- a/src/API/Server/FrontEnd/version.json +++ b/src/API/Server/FrontEnd/version.json @@ -2,13 +2,13 @@ "versionSection": { "versionInfo": { "Major": 1, - "Minor": 2, - "Patch": 1, + "Minor": 3, + "Patch": 0, "Build": 0, "PreRelease": "Beta", "SecurityUpdate": false, - "File": "CodeProject.SenseAI.Server-1.2.1.zip", - 
"ReleaseNotes": "Improved message bus between server and analysis modules" + "File": "CodeProject.SenseAI.Server-1.3.0.zip", + "ReleaseNotes": "First pass at the SDKs for developing backend AI Modules." } } } \ No newline at end of file diff --git a/src/API/Server/FrontEnd/wwwroot/Index.html b/src/API/Server/FrontEnd/wwwroot/Index.html index 77061e3e..dc7dd880 100644 --- a/src/API/Server/FrontEnd/wwwroot/Index.html +++ b/src/API/Server/FrontEnd/wwwroot/Index.html @@ -135,9 +135,8 @@ } /** - * TODO: This will query the server for a list of services that are - * installed, and their status. The results of this will be used to - * pppulate the serviceStatus table + * Query the server for a list of services that are installed, and their status. The + * results of this will be used to populate the serviceStatus table */ async function getAnalysisStatus() { @@ -155,16 +154,28 @@ if (data && data.statuses) { - let results = "
"; + let results = "
"; for (let i = 0; i < data.statuses.length; i++) { - let className = data.statuses[i].value ? "success" : "warning"; - let status = data.statuses[i].value ? "Enabled" : "Not Running"; - results += "
" - + "" + data.statuses[i].key.toUpperCase() + "
" - + status - + "
"; + let running = data.statuses[i].running; + let moduleName = data.statuses[i].name; + let started = data.statuses[i].started; + let lastSeen = data.statuses[i].lastSeen; + let processed = data.statuses[i].processed; + + let className = running ? "success" : (started? "warning" : "muted"); + let status = running ? "Enabled" : (started? "Not Running" : "Not Started"); + + results += + "
" + + "" + moduleName + "" + + "
" + status + "
" + + "
" + // + "
Last: " + lastSeen + "
" + + "
Processed: " + processed + "
" + + "
" + + "
"; } results += "
"; @@ -194,9 +205,9 @@ let newLogs = ""; for (let i = 0; i < data.entries.length; i++) { let logEntry = data.entries[i]; - let lotText = logEntry.entry.replace("\n", "
"); + let logText = logEntry.entry.replace("\n", "
"); let date = new Date(logEntry.timestamp).toLocaleTimeString(); - newLogs += "
" + date + ": " + lotText + "
"; + newLogs += "
" + date + ": " + logText + "
"; _lastLogId = logEntry.id; } diff --git a/src/API/Server/FrontEnd/wwwroot/Vision.html b/src/API/Server/FrontEnd/wwwroot/Vision.html index c3221f9b..fa77da2a 100644 --- a/src/API/Server/FrontEnd/wwwroot/Vision.html +++ b/src/API/Server/FrontEnd/wwwroot/Vision.html @@ -21,14 +21,18 @@ + + + +
+

Background Remover Test Page

+
+
+
+ + +
+
+ +
+ + +
+
+
+
+ +
+
+
+
+ +
+ + +
+
+ + +
+
+ + + + \ No newline at end of file diff --git a/src/AnalysisLayer/CodeProject.SenseAI.AnalysisLayer.Yolo/ObjectDetector.cs b/src/AnalysisLayer/CodeProject.SenseAI.AnalysisLayer.Yolo/ObjectDetector.cs index 1451f9fe..5e81135a 100644 --- a/src/AnalysisLayer/CodeProject.SenseAI.AnalysisLayer.Yolo/ObjectDetector.cs +++ b/src/AnalysisLayer/CodeProject.SenseAI.AnalysisLayer.Yolo/ObjectDetector.cs @@ -13,8 +13,26 @@ using Yolov5Net.Scorer; using Yolov5Net.Scorer.Models; +using CodeProject.SenseAI.AnalysisLayer.SDK; + namespace CodeProject.SenseAI.Analysis.Yolo { + /// + /// An Object Detection Prediction. + /// + public class DetectionPrediction : BoundingBoxPrediction + { + public string? label { get; set; } + } + + /// + /// An Object Detection Response. + /// + public class BackendObjectDetectionResponse : BackendSuccessResponse + { + public DetectionPrediction[]? predictions { get; set; } + } + /// /// An YoloV5 object detector. /// diff --git a/src/AnalysisLayer/CodeProject.SenseAI.AnalysisLayer.Yolo/CodeProject.SenseAI.AnalysisLayer.Yolo.csproj b/src/AnalysisLayer/CodeProject.SenseAI.AnalysisLayer.Yolo/ObjectDetector.csproj similarity index 90% rename from src/AnalysisLayer/CodeProject.SenseAI.AnalysisLayer.Yolo/CodeProject.SenseAI.AnalysisLayer.Yolo.csproj rename to src/AnalysisLayer/CodeProject.SenseAI.AnalysisLayer.Yolo/ObjectDetector.csproj index 1f4ef850..e2ce4b4e 100644 --- a/src/AnalysisLayer/CodeProject.SenseAI.AnalysisLayer.Yolo/CodeProject.SenseAI.AnalysisLayer.Yolo.csproj +++ b/src/AnalysisLayer/CodeProject.SenseAI.AnalysisLayer.Yolo/ObjectDetector.csproj @@ -6,13 +6,14 @@ enable disable CodeProject.SenseAI.AnalysisLayer.Yolo - CodeProject.SenseAI.AnalysisLayer.Yolo - 1.1.0.0 + ObjectDetector + 1.3.0.0 dotnet-CodeProject.SenseAI.AnalysisLayer.Yolo-384BE45C-AAED-42BA-9DDB-EF37356B630F Linux ..\..\.. favicon.ico true + CodeProject none @@ -45,8 +46,8 @@ - + diff --git a/src/AnalysisLayer/CodeProject.SenseAI.AnalysisLayer.Yolo/YoloProcessor.cs b/src/AnalysisLayer/CodeProject.SenseAI.AnalysisLayer.Yolo/YoloProcessor.cs index e4731d25..c0ec58c4 100644 --- a/src/AnalysisLayer/CodeProject.SenseAI.AnalysisLayer.Yolo/YoloProcessor.cs +++ b/src/AnalysisLayer/CodeProject.SenseAI.AnalysisLayer.Yolo/YoloProcessor.cs @@ -6,12 +6,11 @@ using System.Linq; using System.Net.Http; using System.Net.Http.Json; -using System.Text.Json; using System.Threading; using System.Threading.Tasks; -using CodeProject.SenseAI.API.Server.Backend; using Microsoft.Extensions.Configuration; +using CodeProject.SenseAI.AnalysisLayer.SDK; namespace CodeProject.SenseAI.Analysis.Yolo { @@ -24,12 +23,13 @@ namespace CodeProject.SenseAI.Analysis.Yolo ///
public class YoloProcessor : BackgroundService { - private const string _queueName = "detection_queue"; - private static HttpClient? _httpClient; + private string _queueName = "detection_queue"; + private string _moduleId = "_moduleId"; + private int _parallelism = 4; // 4 also seems to be good on my machine. private readonly ILogger _logger; private readonly ObjectDetector _objectDetector; - + private readonly SenseAIClient _senseAI; /// /// Initializes a new instance of the YoloProcessor. /// @@ -47,12 +47,19 @@ public YoloProcessor(ILogger logger, if (port == default) port = 5000; - _httpClient ??= new HttpClient { - BaseAddress = new Uri($"http://localhost:{port}/") + _queueName = configuration.GetValue("MODULE_QUEUE"); + if (_queueName == default) + _queueName = "detection_queue"; + + _moduleId = configuration.GetValue("MODULE_ID"); + if (_moduleId == default) + _moduleId = "object-detect"; + + _senseAI = new SenseAIClient($"http://localhost:{port}/" #if DEBUG - ,Timeout = TimeSpan.FromMinutes(1) + ,TimeSpan.FromMinutes(1) #endif - }; + ); } /// @@ -65,7 +72,7 @@ protected override async Task ExecuteAsync(CancellationToken token) await Task.Delay(1_000, token).ConfigureAwait(false); _logger.LogInformation("Background YoloDetector Task Started."); - await LogToServer("SenseAI Object Detection module started.", token); + await _senseAI.LogToServer("SenseAI Object Detection module started.", token); List tasks = new List(); for (int i= 0; i < _parallelism; i++) @@ -82,19 +89,7 @@ private async Task ProcessQueue(CancellationToken token) BackendRequest? request = null; try { - //_logger.LogInformation("Yolo attempting to pull from Queue."); - var httpResponse = await _httpClient!.GetAsync($"v1/queue/{_queueName}", token) - .ConfigureAwait(false); - - if (httpResponse is not null && - httpResponse.StatusCode == System.Net.HttpStatusCode.OK) - { - var jsonString = await httpResponse.Content.ReadAsStringAsync(token) - .ConfigureAwait(false); - - request = JsonSerializer.Deserialize(jsonString, - new JsonSerializerOptions(JsonSerializerDefaults.Web)); - } + request = await _senseAI.GetRequest(_queueName, _moduleId, token); } catch (Exception ex) { @@ -109,7 +104,7 @@ private async Task ProcessQueue(CancellationToken token) if (file is null) { - await LogToServer("Object Detection Null or File.", token); + await _senseAI.LogToServer("Object Detection Null or File.", token); response = new BackendErrorResponse(-1, "Object Detection Invalid File."); } else @@ -125,7 +120,7 @@ private async Task ProcessQueue(CancellationToken token) } catch (Exception ex) { - await LogToServer($"Object Detection Error for {file.filename}.", token); + await _senseAI.LogToServer($"Object Detection Error for {file.filename}.", token); _logger.LogError(ex, "Yolo Object Detector Exception"); yoloResult = null; } @@ -167,21 +162,10 @@ private async Task ProcessQueue(CancellationToken token) else content = JsonContent.Create(response as BackendErrorResponse); - await _httpClient.PostAsync($"v1/queue/{request.reqid}", content, token) - .ConfigureAwait(false); + await _senseAI.SendResponse(request.reqid, _moduleId, content, token); } } - private async Task LogToServer(string message, CancellationToken token) - { - var form = new FormUrlEncodedContent(new[] - { new KeyValuePair("entry", message)} - ); - - /*var response = */ await _httpClient!.PostAsync($"v1/log", form, token) - .ConfigureAwait(false); - } - /// /// Stop the process. Does nothing. 
        ///
diff --git a/src/AnalysisLayer/CodeProject.SenseAI.AnalysisLayer.Yolo/modulesettings.development.json b/src/AnalysisLayer/CodeProject.SenseAI.AnalysisLayer.Yolo/modulesettings.development.json
new file mode 100644
index 00000000..03e689a4
--- /dev/null
+++ b/src/AnalysisLayer/CodeProject.SenseAI.AnalysisLayer.Yolo/modulesettings.development.json
@@ -0,0 +1,7 @@
+{
+  "Modules": {
+    "ObjectDetection": {
+      "FilePath": "CodeProject.SenseAI.AnalysisLayer.Yolo\\bin\\debug\\net6.0\\ObjectDetector.dll"
+    }
+  }
+}
diff --git a/src/AnalysisLayer/CodeProject.SenseAI.AnalysisLayer.Yolo/modulesettings.json b/src/AnalysisLayer/CodeProject.SenseAI.AnalysisLayer.Yolo/modulesettings.json
new file mode 100644
index 00000000..2b3476b4
--- /dev/null
+++ b/src/AnalysisLayer/CodeProject.SenseAI.AnalysisLayer.Yolo/modulesettings.json
@@ -0,0 +1,48 @@
+{
+  // The processes (typically the backend analysis processes) that are to be started when the
+  // server starts. They will be started in order of appearance.
+
+  "Modules": {
+    "ObjectDetection": {
+      "Activate": true,
+      "Name": "SenseAI Object Detection",
+      "Description": "Detects multiple objects of 80 types in an image.",
+      "FilePath": "CodeProject.SenseAI.AnalysisLayer.Yolo\\ObjectDetector.dll",
+      "Runtime": "dotnet",
+      "Platforms": [ "windows", "linux", "macos", "docker" ],
+      "RouteMaps": [
+        {
+          "Path": "vision/detection",
+          "Queue": "detection_queue",
+          "Command": "detect",
+          "Description": "Detects multiple objects of 80 types in an image.",
+          "Inputs": [
+            {
+              "Name": "image",
+              "Type": "File",
+              "Description": "The image to be analyzed."
+            },
+            {
+              "Name": "min_confidence",
+              "Type": "Float",
+              "Description": "The minimum confidence level for an object to be detected. In the range 0.0 to 1.0. Default 0.4."
+            }
+          ],
+          "Outputs": [
+            {
+              "Name": "success",
+              "Type": "Boolean",
+              "Description": "True if successful."
+            },
+            {
+              "Name": "predictions",
+              "Type": "Object",
+              "Description": "An array of objects with the x_max, x_min, y_max, y_min, label and confidence."
+ } + ] + } + ] + } + } +} + diff --git a/src/AnalysisLayer/DeepStack/intelligencelayer/DeepStack.pyproj b/src/AnalysisLayer/DeepStack/intelligencelayer/Vision.pyproj similarity index 86% rename from src/AnalysisLayer/DeepStack/intelligencelayer/DeepStack.pyproj rename to src/AnalysisLayer/DeepStack/intelligencelayer/Vision.pyproj index 253f44eb..8221da4b 100644 --- a/src/AnalysisLayer/DeepStack/intelligencelayer/DeepStack.pyproj +++ b/src/AnalysisLayer/DeepStack/intelligencelayer/Vision.pyproj @@ -12,7 +12,7 @@ {888888a0-9f3d-457c-b088-3a5042f75d52} Standard Python launcher MSBuild|env|$(MSBuildProjectFullPath) - DeepStack + Vision @@ -20,6 +20,18 @@ 10.0 + + modulesettings.docker.json + + + modulesettings.json + + + modulesettings.macos.json + + + modulesettings.windows.json + @@ -39,7 +51,6 @@ - diff --git a/src/AnalysisLayer/DeepStack/intelligencelayer/detection.py b/src/AnalysisLayer/DeepStack/intelligencelayer/detection.py index 3af78508..5680e4bd 100644 --- a/src/AnalysisLayer/DeepStack/intelligencelayer/detection.py +++ b/src/AnalysisLayer/DeepStack/intelligencelayer/detection.py @@ -1,8 +1,13 @@ import sys +sys.path.append("../../SDK/Python") +from senseAI import SenseAIBackend, LogMethod # will also set the python packages path correctly +senseAI = SenseAIBackend() + import os import json import threading + from senseAI import SenseAIBackend # will also set the python packages path correctly senseAI = SenseAIBackend() @@ -99,19 +104,23 @@ def objectdetection(thread_name: str, delay: float): except UnidentifiedImageError: err_trace = traceback.format_exc() - senseAI.log(err_trace, is_error=True) output = { "success": False, "error": "invalid image file", "code": 400, } - senseAI.errLog("objectdetection", "detection.py", err_trace, "UnidentifiedImageError") + + senseAI.log(LogMethod.Error | LogMethod.Cloud | LogMethod.Server, + { "process": "objectdetection", + "file": "detection.py", + "method": "objectdetection", + "message": err_trace, + "exception_type": "UnidentifiedImageError"}) except Exception: err_trace = traceback.format_exc() - senseAI.log(err_trace, is_error=True) output = { "success": False, @@ -119,7 +128,12 @@ def objectdetection(thread_name: str, delay: float): "code": 500, } - senseAI.errLog("objectdetection", "detection.py", err_trace, "Exception") + senseAI.log(LogMethod.Error | LogMethod.Cloud | LogMethod.Server, + { "process": "objectdetection", + "file": "detection.py", + "method": "objectdetection", + "message": err_trace, + "exception_type": "Exception"}) finally: senseAI.endTimer(timer) @@ -128,9 +142,8 @@ def objectdetection(thread_name: str, delay: float): # time.sleep(delay) if __name__ == "__main__": - senseAI.log("Object Detection module started.") + senseAI.log(LogMethod.Info | LogMethod.Server, {"message": "Object Detection module started."}) objectdetection("", SharedOptions.SLEEP_TIME) - # TODO: Send back a "I'm alive" message to the backend of the API server so it can report to the user # for x in range(1, 4): # thread = threading.Thread(None, objectdetection, args = ("", SharedOptions.SLEEP_TIME)) diff --git a/src/AnalysisLayer/DeepStack/intelligencelayer/face.py b/src/AnalysisLayer/DeepStack/intelligencelayer/face.py index b1999b5b..268e5c29 100644 --- a/src/AnalysisLayer/DeepStack/intelligencelayer/face.py +++ b/src/AnalysisLayer/DeepStack/intelligencelayer/face.py @@ -1,7 +1,9 @@ ## import _thread as thread ## from multiprocessing import Process -from senseAI import SenseAIBackend # will also set the python packages path correctly +import sys 
+sys.path.append("../../SDK/Python") +from senseAI import SenseAIBackend, LogMethod # will also set the python packages path correctly senseAI = SenseAIBackend() import threading @@ -11,7 +13,6 @@ import json import os import sqlite3 -import sys import time import warnings @@ -106,9 +107,6 @@ def face(thread_name, delay): cuda=SharedOptions.CUDA_MODE, ) - init_db() - load_faces() - ADD_FACE = "INSERT INTO TB_EMBEDDINGS(userid,embedding) VALUES(?,?)" UPDATE_FACE = "UPDATE TB_EMBEDDINGS SET embedding = ? where userid = ?" SELECT_FACE = "SELECT * FROM TB_EMBEDDINGS where userid = ?" @@ -174,21 +172,35 @@ def face(thread_name, delay): except UnidentifiedImageError: err_trace = traceback.format_exc() - senseAI.log(err_trace, is_error=True) output = { "success": False, "error": "invalid image", "code": 400, } + senseAI.log(LogMethod.Error | LogMethod.Cloud | LogMethod.Server, + { + "process": "face detection", + "file": "face.py", + "method": "face", + "message": err_trace, + "exception_type": "UnidentifiedImageError" + }) except Exception: err_trace = traceback.format_exc() - senseAI.log(err_trace, is_error=True) output = { "success": False, "error": "error occured on the server", "code": 500, } + senseAI.log(LogMethod.Error | LogMethod.Cloud | LogMethod.Server, + { + "process": "face detection", + "file": "face.py", + "method": "face", + "message": err_trace, + "exception_type": "Exception" + }) finally: senseAI.endTimer(timer) @@ -285,12 +297,19 @@ def face(thread_name, delay): except UnidentifiedImageError: err_trace = traceback.format_exc() - senseAI.log(err_trace, is_error=True) output = { "success": False, "error": "invalid image", "code": 400, } + senseAI.log(LogMethod.Error | LogMethod.Cloud | LogMethod.Server, + { + "process": "face register", + "file": "face.py", + "method": "face", + "message": err_trace, + "exception_type": "UnidentifiedImageError" + }) except Exception: @@ -302,6 +321,14 @@ def face(thread_name, delay): "error": "error occured on the server", "code": 500, } + senseAI.log(LogMethod.Error | LogMethod.Cloud | LogMethod.Server, + { + "process": "face register", + "file": "face.py", + "method": "face", + "message": err_trace, + "exception_type": "Exception" + }) finally: senseAI.endTimer(timer) @@ -332,13 +359,20 @@ def face(thread_name, delay): except Exception: err_trace = traceback.format_exc() - senseAI.log(err_trace, is_error=True) output = { "success": False, "error": "error occured on the server", "code": 500, } + senseAI.log(LogMethod.Error | LogMethod.Cloud | LogMethod.Server, + { + "process": "face registration list", + "file": "face.py", + "method": "face", + "message": err_trace, + "exception_type": "Exception" + }) finally: senseAI.endTimer(timer) @@ -366,13 +400,20 @@ def face(thread_name, delay): except Exception: err_trace = traceback.format_exc() - senseAI.log(err_trace, is_error=True) output = { "success": False, "error": "error occured on the server", "code": 500, } + senseAI.log(LogMethod.Error | LogMethod.Cloud | LogMethod.Server, + { + "process": "face registration delete", + "file": "face.py", + "method": "face", + "message": err_trace, + "exception_type": "Exception" + }) finally: senseAI.endTimer(timer) @@ -525,24 +566,38 @@ def face(thread_name, delay): except UnidentifiedImageError: err_trace = traceback.format_exc() - senseAI.log(err_trace, is_error=True) output = { "success": False, "error": "invalid image", "code": 400, } + senseAI.log(LogMethod.Error | LogMethod.Cloud | LogMethod.Server, + { + "process": "face recognize", + "file": 
"face.py", + "method": "face", + "message": err_trace, + "exception_type": "UnidentifiedImageError" + }) except Exception: err_trace = traceback.format_exc() - senseAI.log(err_trace, is_error=True) output = { "success": False, "error": "error occured on the server", "code": 500, } + senseAI.log(LogMethod.Error | LogMethod.Cloud | LogMethod.Server, + { + "process": "face recognize", + "file": "face.py", + "method": "face", + "message": err_trace, + "exception_type": "Exception" + }) finally: senseAI.endTimer(timer) @@ -662,11 +717,13 @@ def update_faces(thread_name, delay): time.sleep(delay) if __name__ == "__main__": + init_db() + load_faces() + faceupdate_thread = threading.Thread(None, update_faces, args = ("", 1)) face_thread = threading.Thread(None, face, args = ("", SharedOptions.SLEEP_TIME)) faceupdate_thread.start() face_thread.start() - senseAI.log("Face Detection module started.") + senseAI.log(LogMethod.Info | LogMethod.Server, {"message": "Face Detection module started."}) face_thread.join(); - # TODO: Send back a "I'm alive" message to the backend of the API server so it can report to the user diff --git a/src/AnalysisLayer/DeepStack/intelligencelayer/process.py b/src/AnalysisLayer/DeepStack/intelligencelayer/process.py index 5162dbcb..f4fef3d8 100644 --- a/src/AnalysisLayer/DeepStack/intelligencelayer/process.py +++ b/src/AnalysisLayer/DeepStack/intelligencelayer/process.py @@ -36,6 +36,9 @@ def predict(self, img_path: str, confidence: float = 0.4): def predictFromImage(self, img0: Image, confidence: float = 0.4): + if img0 is None: + return [] + confidence = max(0.1,confidence) img = np.asarray(letterbox(img0, new_shape=self.reso)[0]) diff --git a/src/AnalysisLayer/DeepStack/intelligencelayer/requirements.macos.txt b/src/AnalysisLayer/DeepStack/intelligencelayer/requirements.macos.txt new file mode 100644 index 00000000..7a95090b --- /dev/null +++ b/src/AnalysisLayer/DeepStack/intelligencelayer/requirements.macos.txt @@ -0,0 +1,20 @@ +#! Python3.7 + +## SenseAI +requests # Installing Requests, the HTTP library +Pillow # Installing Pillow, a Python Image Library + +## Needed, but not used everywhere. +THOP # Installing THOP (PyTorch-OpCounter) for measuring performance +ONNX # Installing ONNX, the Open Neural Network Exchange library +Pandas # Installing Pandas, a data analysis / data manipulation tool +CoreMLTools # Installing CoreMLTools, for working with .mlmodel format models + +## General libraries +ONNXRuntime # Installing ONNX runtime, the scoring engine for ONNX models +SciPy # Installing SciPy, a library for mathematics, science, and engineering +PyYAML # Installing PyYAML, a library for reading configuration files + +## Specific versions that match the models we're using. 
+Torch==1.6.0 # Installing Torch, for Tensor computation and Deep neural networks +TorchVision==0.7.0 # Installing TorchVision, for Computer Vision based AI \ No newline at end of file diff --git a/src/AnalysisLayer/DeepStack/intelligencelayer/scene.py b/src/AnalysisLayer/DeepStack/intelligencelayer/scene.py index 054c1612..2f4b682d 100644 --- a/src/AnalysisLayer/DeepStack/intelligencelayer/scene.py +++ b/src/AnalysisLayer/DeepStack/intelligencelayer/scene.py @@ -1,6 +1,8 @@ ##import _thread as thread -from senseAI import SenseAIBackend # will also set the python packages path correctly +import sys +sys.path.append("../../SDK/Python") +from senseAI import SenseAIBackend, LogMethod # will also set the python packages path correctly senseAI = SenseAIBackend() import ast @@ -8,7 +10,6 @@ import json import os import sqlite3 -import sys import time import warnings @@ -117,23 +118,34 @@ def scenerecognition(thread_name, delay): except UnidentifiedImageError: err_trace = traceback.format_exc() - senseAI.log(err_trace, is_error=True) output = { "success": False, "error": "error occured on the server", "code": 400, } - - senseAI.errLog("scenerecognition", "scene.py", err_trace, "UnidentifiedImageError") + senseAI.log(LogMethod.Error | LogMethod.Cloud | LogMethod.Server, + { + "process": "scene recognize", + "file": "scene.py", + "method": "scenerecognition", + "message": err_trace, + "exception_type": "UnidentifiedImageError" + }) except Exception: err_trace = traceback.format_exc() - senseAI.log(err_trace, is_error=True) output = {"success": False, "error": "invalid image", "code": 500} - senseAI.errLog("scenerecognition", "scene.py", err_trace, "Exception") + senseAI.log(LogMethod.Error | LogMethod.Cloud | LogMethod.Server, + { + "process": "scene recognize", + "file": "scene.py", + "method": "scenerecognition", + "message": err_trace, + "exception_type": "Exception" + }) finally: senseAI.endTimer(timer) @@ -146,6 +158,5 @@ def scenerecognition(thread_name, delay): if __name__ == "__main__": - senseAI.log("Scene Detection module started.") + senseAI.log(LogMethod.Info | LogMethod.Server, {"message": "Scene Detection module started."}) scenerecognition("", SharedOptions.SLEEP_TIME) - # TODO: Send back a "I'm alive" message to the backend of the API server so it can report to the user diff --git a/src/AnalysisLayer/DeepStack/intelligencelayer/senseAI.py b/src/AnalysisLayer/DeepStack/intelligencelayer/senseAI.py deleted file mode 100644 index 9e06c423..00000000 --- a/src/AnalysisLayer/DeepStack/intelligencelayer/senseAI.py +++ /dev/null @@ -1,216 +0,0 @@ -import os -import io -import sys -import base64 -import time -import json -from datetime import datetime - -# Get the Python interpreter directory, and add to the package search path the path of the packages -# within our local virtual environment -if sys.platform.startswith('linux'): - currentPythonDir = os.path.normpath(os.path.join(os.getcwd(), "../../bin/linux/python37")) - sys.path.insert(0, currentPythonDir + "/venv/lib/python3.7/site-packages") -elif sys.platform.startswith('darwin'): - currentPythonDir = os.path.normpath(os.path.join(os.getcwd(), "../../bin/osx/python37")) - sys.path.insert(0, currentPythonDir + "/venv/lib/python3.7/site-packages") -elif sys.platform.startswith('win'): - currentPythonDir = os.path.normpath(os.path.join(os.getcwd(), "..\\..\\bin\\win\\python37")) - sys.path.insert(0, currentPythonDir + "\\venv\\lib\\site-packages") -else: - currentPythonDir = "" - -import requests -from PIL import Image - -class 
SenseAIBackend: - - pythonDir = currentPythonDir - virtualEnv = os.getenv("VIRTUAL_ENV", f"{pythonDir}/venv") - errLog_APIkey = os.getenv("ERRLOG_APIKEY", "") - port = os.getenv("PORT", "5000") - - errorPause = 1.0 - - BaseQueueUrl = f"http://localhost:{port}/v1/queue/" - BaseLogUrl = f"http://localhost:{port}/v1/log/" - - - requestSession = requests.Session() - - # Performance timer =========================================================================== - - def startTimer(self, desc:str) : - return (desc, time.perf_counter()) - - def endTimer(self, timer : tuple) : - (desc, startTime) = timer - elapsedSeconds = time.perf_counter() - startTime - # log(f"{desc} took {elapsedSeconds:.3} seconds") - - - # Service Commands and Responses ============================================================== - - def getCommand(self, queueName : str): - success = False - try: - cmdTimer = self.startTimer(f"Getting Command from {queueName}") - response = self.requestSession.get( - self.BaseQueueUrl + queueName, - timeout=30, - verify=False - ) - if (response.ok and len(response.content) > 2): - success = True - content = response.text - return [content] - else: - return [] - - except Exception as ex: - # print(f"Error retrieving command: {str(ex)}") - print(f"Error retrieving command: Is the API Server running?") - time.sleep(self.errorPause) - return [] - - finally: - if success: - self.endTimer(cmdTimer) - - def sendResponse(self, req_id : str, body : str): - # self.log(f"Sending response for id: {req_id}") - - success = False - respTimer = self.startTimer("Sending Response") - - try: - self.requestSession.post( - self.BaseQueueUrl + req_id, - data = body, - timeout=1, - verify=False) - - success = True - - except Exception as ex: - time.sleep(self.errorPause) - print(f"Error sending response: {str(ex)}") - # print(f"Error sending response: Is the API Server running?") - - finally: - if success: - self.endTimer(respTimer) - - - # Logging and Error Reporting ================================================================= - - def sendLog(self, entry : str): - - payload = { "entry" : entry } - - try: - self.requestSession.post( - self.BaseLogUrl, - data = payload, - timeout = 1, - verify = False) - - except Exception as ex: - # print(f"Error posting log: {str(ex)}") - print(f"Error posting log: Is the API Server running?") - return - - def log(self, entry : str, is_error : bool = False): - if is_error: - print(entry, file=sys.stderr, flush=True) - else: - print(entry, file=sys.stdout, flush=True) - self.sendLog(entry) - - - def errLog(self, method : str, file:str, message : str, exceptionType: str): - """ - Logs an error to our remote logging server (errLog.io) - """ - - url = 'https://relay.errlog.io/api/v1/log' - - obj = { - 'message' : message, - 'apikey' : self.errLog_APIkey, - 'applicationname' : 'CodeProject SenseAI', - 'type' : exceptionType, - 'errordate' : datetime.utcnow().strftime("%Y-%m-%d %H:%M:%S"), - 'filename' : file, - 'method' : method, - 'lineno' : 0, - 'colno' : 0 - } - - data = json.dumps(obj) - - # If you want to see the data you're sending. - # print "Json Data: ", data - - headers = {'Content-Type': 'application/json','Accept': 'application/json'} - r = requests.post(url, data = data, headers = headers) - - # print("Response:", r) - # print("Text: " , r.text) - - return r - - def getImageFromRequest(self, req_data, index : int): - """ - Gets an image from the requests 'files' array. 
- """ - payload = req_data["payload"] - files = payload["files"] - img_file = files[index] - img_dataB64 = img_file["data"] - img_bytes = base64.b64decode(img_dataB64) - img_stream = io.BytesIO(img_bytes) - img = Image.open(img_stream).convert("RGB") - - return img - - def getRequestImageCount(self, req_data): - payload = req_data["payload"] - files = payload["files"] - return len(files) - - def getRequestValue(self, req_data, key : str, defaultValue : str = None): - """ - Gets a value from the HTTP request Form send by the client - Param: req_data - the request data from the HTTP form - Param: key - the name of the key holding the data in the form collection - Returns: The data if successful; None otherwise. - Remarks: Note that HTTP forms contain multiple values per key (a string array) to allow - for situations like checkboxes, where a set of checkbox controls share a name but have - unique IDs. The form will contain an array of values for the shared name. WE ONLY RETURN - THE FIRST VALUE HERE. - """ - - try: - # req_data is a dict - payload = req_data.get("payload", None) - if payload is None: - return defaultValue - - # payload is also a dict - valueList = payload.get("values", None) - if valueList is None: - return defaultValue - - # valueList is a list. Note that in a HTML form, each element may have multiple values - for value in valueList: - if value["key"] == key : - return value["value"][0] - - return defaultValue - - except: - return defaultValue - - - diff --git a/src/AnalysisLayer/DeepStack/intelligencelayer/shared.py b/src/AnalysisLayer/DeepStack/intelligencelayer/shared.py index f5d8983c..5b4bf75b 100644 --- a/src/AnalysisLayer/DeepStack/intelligencelayer/shared.py +++ b/src/AnalysisLayer/DeepStack/intelligencelayer/shared.py @@ -45,7 +45,7 @@ def getEnvVariable(varName: str, default: str): showEnvVariables = False - print(f"Analysis services setup: Retrieving environment variables...") + print(f"Vision AI services setup: Retrieving environment variables...") APPDIR = os.path.normpath(getEnvVariable("APPDIR", os.path.join(os.getcwd(), ".."))) PROFILE = getEnvVariable("PROFILE", "desktop_cpu") diff --git a/src/AnalysisLayer/DeepStack/modulesettings.docker.json b/src/AnalysisLayer/DeepStack/modulesettings.docker.json new file mode 100644 index 00000000..44128fc9 --- /dev/null +++ b/src/AnalysisLayer/DeepStack/modulesettings.docker.json @@ -0,0 +1,13 @@ +{ + "Modules": { + "FaceProcessing": { + "Runtime": "python38" + }, + "SceneClassification": { + "Runtime": "python38" + }, + "LegacyObjectDetection": { + "Runtime": "python38" + } + } +} diff --git a/src/AnalysisLayer/DeepStack/modulesettings.json b/src/AnalysisLayer/DeepStack/modulesettings.json new file mode 100644 index 00000000..8b226ce1 --- /dev/null +++ b/src/AnalysisLayer/DeepStack/modulesettings.json @@ -0,0 +1,312 @@ +{ + "Modules": { + + "FaceProcessing": { + "Name": "Face Processing", + "Activate": true, + "Description": "A number of Face image APIs including detect, recognize, and compare.", + "FilePath": "DeepStack\\intelligencelayer\\face.py", + "Runtime": "python38", + "Platforms": [ "windows", "linux", "macos", "docker" ], + "EnableFlags": [ "VISION-FACE" ], + + "EnvironmentVariables": { + "VISION-FACE": true, + + "PROFILE": "desktop_cpu", + "CUDA_MODE": "False", + "MODE": "MEDIUM", + "PORT": 5000, + + "VIRTUAL_ENV": "%PYTHON_BASEPATH%", + "APPDIR": "%MODULES_PATH%\\DeepStack\\intelligencelayer", + "DATA_DIR": "%DATA_DIR%", + "TEMP_PATH": "%MODULES_PATH%\\DeepStack\\tempstore", + "MODELS_DIR": 
"%MODULES_PATH%\\DeepStack\\assets" + }, + + "RouteMaps": [ + { + "Path": "vision/face", + "Queue": "face_queue", + "Command": "detect", + "Description": "Detects faces in an image.", + "Inputs": [ + { + "Name": "image", + "Type": "File", + "Description": "The image to be analyzed." + }, + { + "Name": "min_confidence", + "Type": "Float", + "Description": "The minimum confidence level for an object will be detected. In the range 0.0 to 1.0. Default 0.4." + } + ], + "Outputs": [ + { + "Name": "success", + "Type": "Boolean", + "Description": "True if successful." + }, + { + "Name": "predictions", + "Type": "Object", + "Description": "An array of objects with the x_max, x_min, max, y_min, label and confidence." + } + ] + }, + { + "Path": "vision/face/match", + "Queue": "face_queue", + "Command": "match", + "Description": "Matches faces in two images.", + "Inputs": [ + { + "Name": "image1", + "Type": "File", + "Description": "First image to be analyzed." + }, + { + "Name": "image2", + "Type": "File", + "Description": "Second image to be analyzed." + } + ], + "Outputs": [ + { + "Name": "success", + "Type": "Boolean", + "Description": "True if successful." + }, + { + "Name": "similarity", + "Type": "Float", + "Description": "How similar the two images are, in the range of 0.0 to 1.0." + } + ] + }, + { + "Path": "vision/face/list", + "Queue": "face_queue", + "Command": "list", + "Description": "Lists the users that have images registered.", + "Inputs": [ // no inputs + ], + "Outputs": [ + { + "Name": "success", + "Type": "Boolean", + "Description": "True if successful." + }, + { + "Name": "faces", + "Type": "Object", + "Description": "An array of the userid strings for users with registered images." + } + ] + }, + { + "Path": "vision/face/register", + "Queue": "face_queue", + "Command": "register", + "Description": "Registers one or more images for a user for recognition.", + "Inputs": [ + { + "Name": "imageN", + "Type": "File", + "Description": "The one or more images to be registered." + }, + { + "Name": "userid", + "Type": "Text", + "Description": "The identifying string for the user." + } + ], + "Outputs": [ + { + "Name": "success", + "Type": "Boolean", + "Description": "True if successful." + }, + { + "Name": "Message", + "Type": "Text", + "Description": "face added" + } + ] + + }, + { + "Path": "vision/face/delete", + "Queue": "face_queue", + "Command": "delete", + "Description": "Removes a userid and images from the Face Registration database.", + "Inputs": [ + { + "Name": "userid", + "Type": "Text", + "Description": "The identifying string for the user." + } + ], + "Outputs": [ + { + "Name": "success", + "Type": "Boolean", + "Description": "True if successful." + } + ] + }, + { + "Path": "vision/face/recognize", + "Queue": "face_queue", + "Command": "recognize", + "Description": "Recognizes faces in an image.", + "Inputs": [ + { + "Name": "image", + "Type": "File", + "Description": "The image to be analyzed." + }, + { + "Name": "min_confidence", + "Type": "Float", + "Description": "The minimum confidence level for an object will be detected. In the range 0.0 to 1.0. Default 0.4." + } + ], + "Outputs": [ + { + "Name": "success", + "Type": "Boolean", + "Description": "True if successful." + }, + { + "Name": "predictions", + "Type": "Object", + "Description": "An array of objects with the userid, x_max, x_min, max, y_min, label and confidence." 
+ } + ] + + } + ] + }, + + "SceneClassification": { + "Name": "Scene Classification", + "Activate": true, + "Description": "Classifies the scene in an image.", + "FilePath": "DeepStack\\intelligencelayer\\scene.py", + "Runtime": "python38", + "Platforms": [ "windows", "linux", "macos", "docker" ], + "EnableFlags": [ "VISION-SCENE" ], + + "EnvironmentVariables": { + "VISION-SCENE": true, + + "PROFILE": "desktop_cpu", + "CUDA_MODE": "False", + "MODE": "MEDIUM", + "PORT": 5000, + + "VIRTUAL_ENV": "%PYTHON_BASEPATH%", + "APPDIR": "%MODULES_PATH%\\DeepStack\\intelligencelayer", + "DATA_DIR": "%DATA_DIR%", + "TEMP_PATH": "%MODULES_PATH%\\DeepStack\\tempstore", + "MODELS_DIR": "%MODULES_PATH%\\DeepStack\\assets" + }, + + "RouteMaps": [ + { + "Path": "vision/scene", + "Queue": "scene_queue", + "Command": "detect", + "Description": "Classifies the scene in an image.", + "Inputs": [ + { + "Name": "image", + "Type": "File", + "Description": "The image to be analyzed." + } + ], + "Outputs": [ + { + "Name": "success", + "Type": "Boolean", + "Description": "True if successful." + }, + { + "Name": "label", + "Type": "Text", + "Description": "The classification of the scene such as 'conference_room'." + }, + { + "Name": "confidence", + "Type": "Float", + "Description": "The confidence in the classification in the range of 0.0 to 1.0." + } + ] + } + ] + }, + + // We've replaced this with the SenseAI Object Detection (above). We're leaving it here + // so you can see the effect of disabling a module. + "LegacyObjectDetection": { + "Name": "Legacy Object Detection", + "Activate": false, + "Description": "Detects multiple objects of 80 types in an image.", + "FilePath": "DeepStack\\intelligencelayer\\detection.py", + "Runtime": "python38", + "Platforms": [ "windows", "linux", "macos", "docker" ], + "EnableFlags": [ "VISION-DETECTION" ], + + "EnvironmentVariables": { + "VISION-DETECTION": false, + + "PROFILE": "desktop_cpu", + "CUDA_MODE": "False", + "MODE": "MEDIUM", + "PORT": 5000, + + "VIRTUAL_ENV": "%PYTHON_BASEPATH%", + "APPDIR": "%MODULES_PATH%\\DeepStack\\intelligencelayer", + "DATA_DIR": "%DATA_DIR%", + "TEMP_PATH": "%MODULES_PATH%\\DeepStack\\tempstore", + "MODELS_DIR": "%MODULES_PATH%\\DeepStack\\assets" + }, + + "RouteMaps": [ + { + "Path": "vision/detection", + "Queue": "detection_queue", + "Command": "detect", + "Description": "Detects multiple objects of 80 types in an image.", + "Inputs": [ + { + "Name": "image", + "Type": "File", + "Description": "The image to be analyzed." + }, + { + "Name": "min_confidence", + "Type": "Float", + "Description": "The minimum confidence level for an object will be detected. In the range 0.0 to 1.0. Default 0.4." + } + ], + "Outputs": [ + { + "Name": "success", + "Type": "Boolean", + "Description": "True if successful." + }, + { + "Name": "predictions", + "Type": "Object", + "Description": "An array of objects with the x_max, x_min, max, y_min, label and confidence." + } + ] + } + ] + } + } +} \ No newline at end of file diff --git a/src/AnalysisLayer/DeepStack/modulesettings.macos.json b/src/AnalysisLayer/DeepStack/modulesettings.macos.json new file mode 100644 index 00000000..fb31f70a --- /dev/null +++ b/src/AnalysisLayer/DeepStack/modulesettings.macos.json @@ -0,0 +1,24 @@ +{ + "Modules": { + // Port 5000 is reserved on macOS, and we need to override what .NET thinks is a good place + // for data on a mac. 
+ "FaceProcessing": { + "EnvironmentVariables": { + "PORT": 5500, + "DATA_DIR": "/usr/share/CodeProject/SenseAI" + } + }, + "SceneClassification": { + "EnvironmentVariables": { + "PORT": 5500, + "DATA_DIR": "/usr/share/CodeProject/SenseAI" + } + }, + "LegacyObjectDetection": { + "EnvironmentVariables": { + "PORT": 5500, + "DATA_DIR": "/usr/share/CodeProject/SenseAI" + } + } + } +} diff --git a/src/AnalysisLayer/DeepStack/modulesettings.windows.json b/src/AnalysisLayer/DeepStack/modulesettings.windows.json new file mode 100644 index 00000000..fa20b5d9 --- /dev/null +++ b/src/AnalysisLayer/DeepStack/modulesettings.windows.json @@ -0,0 +1,15 @@ +{ + // We don't have a Python3.8 installer for windows, so let's use the existing Python3.7 that + // we know works + "Modules": { + "FaceProcessing": { + "Runtime": "python37" + }, + "SceneClassification": { + "Runtime": "python37" + }, + "LegacyObjectDetection": { + "Runtime": "python37" + } + } +} diff --git a/src/AnalysisLayer/PortraitFilter/Lib/DeepPersonLab.cs b/src/AnalysisLayer/PortraitFilter/Lib/DeepPersonLab.cs new file mode 100644 index 00000000..ecdd89ed --- /dev/null +++ b/src/AnalysisLayer/PortraitFilter/Lib/DeepPersonLab.cs @@ -0,0 +1,149 @@ +using Microsoft.ML.OnnxRuntime; +using Microsoft.ML.OnnxRuntime.Tensors; +using System; +using System.Collections.Generic; +using System.Drawing; +using System.Drawing.Imaging; +using System.Linq; + +namespace CodeProject.SenseAI.AnalysisLayer.PortraitFilter +{ + /// + /// Defines deep person lab. + /// + public class DeepPersonLab + { + #region Private data + private const int _person = 15; + private const int _size = 513; + private InferenceSession _session; + #endregion + + #region Class components + /// + /// Initializes deep person lab. + /// + /// Model path + public DeepPersonLab(string modelPath) + { + var tic = Environment.TickCount; + Console.WriteLine("Starting inference session..."); + _session = new InferenceSession(modelPath); + Console.WriteLine($"Session started in {Environment.TickCount - tic} mls."); + } + /// + /// Returns segmentation mask. + /// + /// Input image + /// Segmentation mask + [System.Diagnostics.CodeAnalysis.SuppressMessage("Interoperability", + "CA1416:Validate platform compatibility", + Justification = "System.Drawing.EnableUnixSupport is enabled in the runtimeconfig.template.json. 
A true fix will be made soon.")] + public Bitmap Fit(Bitmap image) + { + // scaling image + var width = image.Width; + var height = image.Height; + var ratio = 1.0f * _size / Math.Max(width, height); + var size = new Size( + (int)(ratio * width), + (int)(ratio * height)); + var resized = new Bitmap(image, size); + + // creating tensor + Console.WriteLine("Creating image tensor..."); + var tic = Environment.TickCount; + var inputMeta = _session.InputMetadata; + var name = inputMeta.Keys.ToArray()[0]; + var dimentions = new int[] { 1, size.Height, size.Width, 3 }; + var inputData = Onnx.ToTensor(resized); + resized.Dispose(); + Console.WriteLine($"Tensor was created in {Environment.TickCount - tic} mls."); + + // prediction + Console.WriteLine("Creating segmentation mask..."); + tic = Environment.TickCount; + var t1 = new DenseTensor(inputData, dimentions); + var inputs = new List() { NamedOnnxValue.CreateFromTensor(name, t1) }; + var results = _session.Run(inputs).ToArray(); + var map = results[0].AsTensor().ToArray(); + var mask = DeepPersonLab.FromSegmentationMap(map, size.Width, size.Height); + Console.WriteLine($"Segmentation was created in {Environment.TickCount - tic} mls."); + + // return mask + return new Bitmap(mask, width, height); + } + #endregion + + #region Static methods + /// + /// Converts an RGB tensor array to a color image. + /// + /// RGB tensor array + /// Bitmap width + /// Bitmap height + /// Bitmap + [System.Diagnostics.CodeAnalysis.SuppressMessage("Interoperability", + "CA1416:Validate platform compatibility", + Justification = "System.Drawing.EnableUnixSupport is enabled in the runtimeconfig.template.json. A true fix will be made soon.")] + public unsafe static Bitmap FromSegmentationMap(long[] tensor, int width, int height) + { + Bitmap bitmap = new Bitmap(width, height); + FromSegmentationMap(tensor, width, height, bitmap); + return bitmap; + } + /// + /// Converts an RGB tensor array to a color image. + /// + /// RGBA tensor array + /// Bitmap width + /// Bitmap height + /// Bitmap + public static void FromSegmentationMap(long[] tensor, int width, int height, Bitmap Data) + { + BitmapData bmData = Onnx.Lock24bpp(Data); + FromSegmentationMap(tensor, width, height, bmData); + Onnx.Unlock(Data, bmData); + return; + } + /// + /// Converts an RGB tensor array to a color image. + /// + /// RGBA tensor array + /// Bitmap width + /// Bitmap height + /// Bitmap data + [System.Diagnostics.CodeAnalysis.SuppressMessage("Interoperability", + "CA1416:Validate platform compatibility", + Justification = "System.Drawing.EnableUnixSupport is enabled in the runtimeconfig.template.json. A true fix will be made soon.")] + public unsafe static void FromSegmentationMap(long[] tensor, int width, int height, BitmapData bmData) + { + // params + int stride = bmData.Stride; + byte* p = (byte*)bmData.Scan0.ToPointer(); + int pos = 0; + + // do job + for (int j = 0; j < height; j++) + { + int k, jstride = j * stride; + + for (int i = 0; i < width; i++, pos++) + { + k = jstride + i * 3; + + var z = (tensor[pos] == _person) + ? 
(byte)255 : (byte)0; + + // rgb + p[k + 2] = z; + p[k + 1] = z; + p[k + 0] = z; + } + } + + return; + } + #endregion + } +} diff --git a/src/AnalysisLayer/PortraitFilter/Lib/Onnx.cs b/src/AnalysisLayer/PortraitFilter/Lib/Onnx.cs new file mode 100644 index 00000000..7cfb6363 --- /dev/null +++ b/src/AnalysisLayer/PortraitFilter/Lib/Onnx.cs @@ -0,0 +1,152 @@ +using System.Drawing; +using System.Drawing.Imaging; + +namespace CodeProject.SenseAI.AnalysisLayer.PortraitFilter +{ + /// + /// Uses for onnx transformations. + /// + public static class Onnx + { + #region Tensor + /// + /// Converts a Bitmap to an RGB tensor array. + /// + /// Bitmap + /// RGB tensor array + public static byte[] ToTensor(this Bitmap Data) + { + BitmapData bmData = Onnx.Lock24bpp(Data); + byte[] rgb = Onnx.ToTensor(bmData); + Onnx.Unlock(Data, bmData); + return rgb; + } + /// + /// Converts a Bitmap to an RGB tensor array. + /// + /// Bitmap data + /// RGB tensor array + [System.Diagnostics.CodeAnalysis.SuppressMessage("Interoperability", + "CA1416:Validate platform compatibility", + Justification = "System.Drawing.EnableUnixSupport is enabled in the runtimeconfig.template.json. A true fix will be made soon.")] + public unsafe static byte[] ToTensor(this BitmapData bmData) + { + // params + int width = bmData.Width, height = bmData.Height, stride = bmData.Stride; + byte* p = (byte*)bmData.Scan0.ToPointer(); + byte[] t = new byte[3 * height * width]; + int pos = 0; + + // do job + for (int j = 0; j < height; j++) + { + int k, jstride = j * stride; + + for (int i = 0; i < width; i++) + { + k = jstride + i * 3; + + t[pos++] = p[k + 2]; + t[pos++] = p[k + 1]; + t[pos++] = p[k + 0]; + } + } + + return t; + } + /// + /// Converts an RGB tensor array to a color image. + /// + /// RGB tensor array + /// Bitmap width + /// Bitmap height + /// Bitmap + [System.Diagnostics.CodeAnalysis.SuppressMessage("Interoperability", + "CA1416:Validate platform compatibility", + Justification = "System.Drawing.EnableUnixSupport is enabled in the runtimeconfig.template.json. A true fix will be made soon.")] + public unsafe static Bitmap FromTensor(this byte[] tensor, int width, int height) + { + Bitmap bitmap = new Bitmap(width, height); + FromTensor(tensor, width, height, bitmap); + return bitmap; + } + /// + /// Converts an RGB tensor array to a color image. + /// + /// RGBA tensor array + /// Bitmap width + /// Bitmap height + /// Bitmap data + [System.Diagnostics.CodeAnalysis.SuppressMessage("Interoperability", + "CA1416:Validate platform compatibility", + Justification = "System.Drawing.EnableUnixSupport is enabled in the runtimeconfig.template.json. A true fix will be made soon.")] + public unsafe static void FromTensor(this byte[] tensor, int width, int height, BitmapData bmData) + { + // params + int stride = bmData.Stride; + byte* p = (byte*)bmData.Scan0.ToPointer(); + int pos = 0; + + // do job + for (int j = 0; j < height; j++) + { + int k, jstride = j * stride; + + for (int i = 0; i < width; i++) + { + k = jstride + i * 3; + + // rgb + p[k + 2] = tensor[pos++]; + p[k + 1] = tensor[pos++]; + p[k + 0] = tensor[pos++]; + } + } + + return; + } + /// + /// Converts an RGB tensor array to a color image. 
+ /// + /// RGBA tensor array + /// Bitmap width + /// Bitmap height + /// Bitmap + public static void FromTensor(this byte[] tensor, int width, int height, Bitmap Data) + { + BitmapData bmData = Onnx.Lock24bpp(Data); + FromTensor(tensor, width, height, bmData); + Onnx.Unlock(Data, bmData); + return; + } + #endregion + + #region BitmapData voids + /// + /// Blocks Bitmap in system memory. + /// + /// Bitmap + /// Bitmap data + [System.Diagnostics.CodeAnalysis.SuppressMessage("Interoperability", + "CA1416:Validate platform compatibility", + Justification = "System.Drawing.EnableUnixSupport is enabled in the runtimeconfig.template.json. A true fix will be made soon.")] + public static BitmapData Lock24bpp(this Bitmap b) + { + return b.LockBits(new Rectangle(0, 0, b.Width, b.Height), ImageLockMode.ReadWrite, PixelFormat.Format24bppRgb); + } + /// + /// Unblocks Bitmap in system memory. + /// + /// Bitmap + /// Bitmap data + [System.Diagnostics.CodeAnalysis.SuppressMessage("Interoperability", + "CA1416:Validate platform compatibility", + Justification = "System.Drawing.EnableUnixSupport is enabled in the runtimeconfig.template.json. A true fix will be made soon.")] + public static void Unlock(this Bitmap b, BitmapData bmData) + { + b.UnlockBits(bmData); + return; + } + #endregion + } +} diff --git a/src/AnalysisLayer/PortraitFilter/Lib/PortraitModeFilter.cs b/src/AnalysisLayer/PortraitFilter/Lib/PortraitModeFilter.cs new file mode 100644 index 00000000..8a84a087 --- /dev/null +++ b/src/AnalysisLayer/PortraitFilter/Lib/PortraitModeFilter.cs @@ -0,0 +1,92 @@ +using System.Diagnostics; +using System.Drawing; +using UMapx.Core; +using UMapx.Imaging; + +namespace CodeProject.SenseAI.AnalysisLayer.PortraitFilter +{ + /// + /// Defines "portrait mode" filter. + /// + public class PortraitModeFilter + { + #region Private data + BoxBlur _boxBlur; + AlphaChannelFilter _alphaChannelFilter; + Merge _merge; + float _strength; + #endregion + + #region Class components + /// + /// Initializes "portrait mode" filter. + /// + /// Strength + public PortraitModeFilter(float strength) + { + _boxBlur = new BoxBlur(); + _alphaChannelFilter = new AlphaChannelFilter(); + _merge = new Merge(0, 0, 255); + _strength = strength; + } + /// + /// Gets or sets strength. + /// + public float Strength + { + get + { + return _strength; + } + set + { + _strength = Maths.Float(value); + } + } + + + /// + /// Applies filter to image. + /// + /// Input image + /// Segmentation mask + /// Portrait image + [System.Diagnostics.CodeAnalysis.SuppressMessage("Interoperability", + "CA1416:Validate platform compatibility", + Justification = "System.Drawing.EnableUnixSupport is enabled in the runtimeconfig.template.json. 
A true fix will be made soon.")] + public Bitmap Apply(Bitmap image, Bitmap mask) + { + // time + int tic = Environment.TickCount; + Console.WriteLine("Applying portrait mode filter..."); + + // deep person lab + Bitmap alphaMask = (Bitmap)image.Clone(); + Bitmap portrait = (Bitmap)image.Clone(); + Bitmap segmentantionMask = (Bitmap)mask.Clone(); + + // radius calculation + int radius = (int)(_strength * 2 * (( Math.Max(image.Height, image.Width) / 100 ) + 1)); + Console.WriteLine($"Blur radius --> {radius}"); + + // gaussian blur approximation + _boxBlur.Size = new SizeInt(radius, radius); + _boxBlur.Apply(portrait); + _boxBlur.Apply(segmentantionMask); + + _boxBlur.Size = new SizeInt(radius / 2, radius / 2); + _boxBlur.Apply(portrait); + _boxBlur.Apply(segmentantionMask); + + // merging images + _alphaChannelFilter.Apply(alphaMask, segmentantionMask); + _merge.Apply(portrait, alphaMask); + alphaMask.Dispose(); + segmentantionMask.Dispose(); + Console.WriteLine($"Portrait mode filter was applied in {Environment.TickCount - tic} mls."); + + return portrait; + } + #endregion + } +} diff --git a/src/AnalysisLayer/PortraitFilter/Lib/deeplabv3_mnv2_pascal_train_aug.onnx b/src/AnalysisLayer/PortraitFilter/Lib/deeplabv3_mnv2_pascal_train_aug.onnx new file mode 100644 index 00000000..6335db87 Binary files /dev/null and b/src/AnalysisLayer/PortraitFilter/Lib/deeplabv3_mnv2_pascal_train_aug.onnx differ diff --git a/src/AnalysisLayer/PortraitFilter/PortraitFilter.csproj b/src/AnalysisLayer/PortraitFilter/PortraitFilter.csproj new file mode 100644 index 00000000..2ab641ad --- /dev/null +++ b/src/AnalysisLayer/PortraitFilter/PortraitFilter.csproj @@ -0,0 +1,46 @@ + + + + net6.0 + enable + dotnet-PortraitFilter-C4FF0E02-9BBA-46BE-B89E-9C2DA88576BD + enable + CodeProject.SenseAI.AnalysisLayer.PortraitFilter + PortraitFilter + 1.3.0.0 + Linux + ..\..\.. + favicon.ico + true + true + CodeProject + + + + + + + + + + + + + + + + + + + + + SettingsSingleFileGenerator + + + + + + Always + + + diff --git a/src/AnalysisLayer/PortraitFilter/PortraitFilterWorker.cs b/src/AnalysisLayer/PortraitFilter/PortraitFilterWorker.cs new file mode 100644 index 00000000..8c1690bd --- /dev/null +++ b/src/AnalysisLayer/PortraitFilter/PortraitFilterWorker.cs @@ -0,0 +1,241 @@ +using CodeProject.SenseAI.AnalysisLayer.SDK; + +using SkiaSharp; +using SkiaSharp.Views.Desktop; + +using System.Drawing; +using System.Net.Http.Json; + +namespace CodeProject.SenseAI.AnalysisLayer.PortraitFilter +{ + class PortraitResponse : BackendSuccessResponse + { + public byte[]? filtered_image { get; set; } + } + + public class PortraitFilterWorker : BackgroundService + { + private const string _modelPath = "Lib\\deeplabv3_mnv2_pascal_train_aug.onnx"; + + private string _queueName = "portraitfilter_queue"; + private string _moduleId = "portrait-mode"; + + private int _parallelism = 1; // 4 also seems to be good on my machine. + private readonly ILogger _logger; + private readonly SenseAIClient _senseAI; + private DeepPersonLab _deepPersonLab; + + /// + /// Initializes a new instance of the PortraitFilterWorker. + /// + /// The Logger. + /// The app configuration values. + public PortraitFilterWorker(ILogger logger, + IConfiguration configuration) + { + _logger = logger; + + int port = configuration.GetValue("PORT"); + if (port == default) + port = 5000; + + // TODO: It would be really nice to have the server tell the module the name of the + // queue that they should be processing. 
The queue name is in the RouteMap, with a + // different queue for each route. While this provides flexibility, it means we have to + // hardcode the queue into the analysis module and ensure it is always the same as the + // value in the modulesettings file. Maybe, for now, have the queue name be defined at + // the module level in modulesettings, so it's shared among all routes. If a module + // requires more than one queue then it's probably breaking the Single Resonsibility + // principle. + // + // Because the Modules are not always started by the Server, for debugging and mesh, we + // would need the Module to register their route info, including the queue, at startup. + // The frontend can still start any modules it discovers, but the registration should + // be done by the Module. + // + // Notes: + // ModuleId: This needs to be unique across Modules, but the same for all instances of + // same Module type. Because we want one Queue per Module type, this could effectively + // be used as the Queue selector. ModuleIds could become a GUID. + // + // TODO: Move the Queue name up to the Module level. + + // Note that looking up MODULE_QUEUE will currently always return null. It's here as an + // annoying reminder. + _queueName = configuration.GetValue("MODULE_QUEUE"); + if (_queueName == default) + _queueName = "portraitfilter_queue"; + + _moduleId = configuration.GetValue("MODULE_ID"); + if (_moduleId == default) + _moduleId = "PortraitFilter"; + + _senseAI = new SenseAIClient($"http://localhost:{port}/" +#if DEBUG + , TimeSpan.FromMinutes(1) +#endif + ); + + _deepPersonLab = new DeepPersonLab( + "Lib\\deeplabv3_mnv2_pascal_train_aug.onnx".Replace('\\', Path.DirectorySeparatorChar)); + } + + /// + /// Start the process. + /// + /// The cancellation token. + /// + protected override async Task ExecuteAsync(CancellationToken token) + { + await Task.Delay(1_000, token).ConfigureAwait(false); + + _logger.LogInformation("Background Portrait Filter Task Started."); + await _senseAI.LogToServer("SenseAI Portrait Filter module started.", token); + + List tasks = new List(); + for (int i = 0; i < _parallelism; i++) + tasks.Add(ProcessQueue(token)); + + await Task.WhenAll(tasks).ConfigureAwait(false); + } + + private async Task ProcessQueue(CancellationToken token) + { + PortraitModeFilter portraitModeFilter = new PortraitModeFilter(0.0f); + while (!token.IsCancellationRequested) + { + // _logger.LogInformation("Checking Portrait Filter queue."); + + BackendResponseBase response; + BackendRequest? request = null; + try + { + request = await _senseAI.GetRequest(_queueName, _moduleId, token); + } + catch (Exception ex) + { + _logger.LogInformation(ex, "Portrait Filter Exception"); + continue; + } + + if (request is null) + continue; + + // ignore the command as only one command + + // ignoring the file name + var file = request.payload?.files?.FirstOrDefault(); + var strengthStr = request.payload?.values? + .FirstOrDefault(x => x.Key == "strength") + .Value?[0] ?? "0.5"; + + if (!float.TryParse(strengthStr, out var strength)) + strength = 0.5f; + + if (file?.data is null) + { + await _senseAI.LogToServer("Portrait Filter File or file data is null.", token); + response = new BackendErrorResponse(-1, "Portrait Filter Invalid File."); + } + else + { + _logger.LogInformation($"Processing {file.filename}"); + // Do the processing here + + // dummy result + byte[]? 
result = null; + + try + { + var imageData = file.data; + var image = GetImage(imageData); + + if (image is not null) + { + var mask = _deepPersonLab.Fit(image); + if (mask is not null) + { + portraitModeFilter.Strength = strength; + var filteredImage = portraitModeFilter.Apply(image, mask); + result = ImageToByteArray(filteredImage); + } + + } + + // yoloResult = _objectDetector.Predict(imageData); + } + catch (Exception ex) + { + await _senseAI.LogToServer($"Portrait Filter Error for {file.filename}.", token); + _logger.LogError(ex, "Portrait Filter Exception"); + result = null; + } + + if (result is null) + { + response = new BackendErrorResponse(-1, "Portrait Filter returned null."); + } + else + { + response = new PortraitResponse + { + filtered_image = result + }; + } + } + + HttpContent? content = null; + if (response is PortraitResponse portraitResponse) + content = JsonContent.Create(portraitResponse); + else + content = JsonContent.Create(response as BackendErrorResponse); + + await _senseAI.SendResponse(request.reqid, _moduleId, content, token); + } + } + + /// + /// Stop the process. Does nothing. + /// + /// The stopping cancellation token. + /// + public override async Task StopAsync(CancellationToken token) + { + _logger.LogInformation("Background Portrait Filter Task is stopping."); + + await base.StopAsync(token); + } + + // Using SkiaSharp as it handles more formats. + private static Bitmap? GetImage(byte[] imageData) + { + if (imageData == null) + return null; + + var skiaImage = SKImage.FromEncodedData(imageData); + if (skiaImage is null) + return null; + + return skiaImage.ToBitmap(); + } + + public static byte[]? ImageToByteArray(Image img) + { + if (img is null) + return null; + + using var stream = new MemoryStream(); + + // We'll disabled the warnings around the cross platform issues with System.Drawing. + // We have enabled System.Drawing.EnableUnixSupport in the runtimeconfig.template.json + // file, but understand that in .NET7 that option won't be available. We will port to + // a different libary in the future. 
For more info see + // https://github.com/dotnet/designs/blob/main/accepted/2021/system-drawing-win-only/system-drawing-win-only.md + #pragma warning disable CA1416 // Validate platform compatibility + img.Save(stream, System.Drawing.Imaging.ImageFormat.Png); + #pragma warning restore CA1416 // Validate platform compatibility + + return stream.ToArray(); + } + } +} \ No newline at end of file diff --git a/src/AnalysisLayer/PortraitFilter/Program.cs b/src/AnalysisLayer/PortraitFilter/Program.cs new file mode 100644 index 00000000..da631ec5 --- /dev/null +++ b/src/AnalysisLayer/PortraitFilter/Program.cs @@ -0,0 +1,10 @@ +using CodeProject.SenseAI.AnalysisLayer.PortraitFilter; + +IHost host = Host.CreateDefaultBuilder(args) + .ConfigureServices(services => + { + services.AddHostedService(); + }) + .Build(); + +await host.RunAsync(); diff --git a/src/AnalysisLayer/PortraitFilter/Properties/launchSettings.json b/src/AnalysisLayer/PortraitFilter/Properties/launchSettings.json new file mode 100644 index 00000000..ef5360e8 --- /dev/null +++ b/src/AnalysisLayer/PortraitFilter/Properties/launchSettings.json @@ -0,0 +1,11 @@ +{ + "profiles": { + "PortraitFilter": { + "commandName": "Project", + "dotnetRunMessages": true, + "environmentVariables": { + "DOTNET_ENVIRONMENT": "Development" + } + } + } +} diff --git a/src/AnalysisLayer/PortraitFilter/appsettings.Development.json b/src/AnalysisLayer/PortraitFilter/appsettings.Development.json new file mode 100644 index 00000000..b2dcdb67 --- /dev/null +++ b/src/AnalysisLayer/PortraitFilter/appsettings.Development.json @@ -0,0 +1,8 @@ +{ + "Logging": { + "LogLevel": { + "Default": "Information", + "Microsoft.Hosting.Lifetime": "Information" + } + } +} diff --git a/src/AnalysisLayer/PortraitFilter/appsettings.json b/src/AnalysisLayer/PortraitFilter/appsettings.json new file mode 100644 index 00000000..b2dcdb67 --- /dev/null +++ b/src/AnalysisLayer/PortraitFilter/appsettings.json @@ -0,0 +1,8 @@ +{ + "Logging": { + "LogLevel": { + "Default": "Information", + "Microsoft.Hosting.Lifetime": "Information" + } + } +} diff --git a/src/AnalysisLayer/PortraitFilter/favicon.ico b/src/AnalysisLayer/PortraitFilter/favicon.ico new file mode 100644 index 00000000..f92489f7 Binary files /dev/null and b/src/AnalysisLayer/PortraitFilter/favicon.ico differ diff --git a/src/AnalysisLayer/PortraitFilter/modulesettings.development.json b/src/AnalysisLayer/PortraitFilter/modulesettings.development.json new file mode 100644 index 00000000..3edf5492 --- /dev/null +++ b/src/AnalysisLayer/PortraitFilter/modulesettings.development.json @@ -0,0 +1,7 @@ +{ + "Modules": { + "PortraitFilter": { + "FilePath": "PortraitFilter\\bin\\debug\\net6.0\\PortraitFilter.dll" + } + } +} diff --git a/src/AnalysisLayer/PortraitFilter/modulesettings.json b/src/AnalysisLayer/PortraitFilter/modulesettings.json new file mode 100644 index 00000000..b40b843f --- /dev/null +++ b/src/AnalysisLayer/PortraitFilter/modulesettings.json @@ -0,0 +1,46 @@ +{ + // The processes (typically the backend analysis processes) that are to be started when the + // server starts. They will be started in order of appearance. 
+ "Modules": { + "PortraitFilter": { + "Name": "Portrait Filter", + "Activate": true, + "Description": "Blurs the background behind people in an image.", + "FilePath": "PortraitFilter\\PortraitFilter.dll", + "Runtime": "dotnet", + "Platforms": [ "windows", "linux", "docker" ], + "RouteMaps": [ + { + "Path": "image/portraitfilter", + "Queue": "portraitfilter_queue", + "Command": "filter", + "Description": "Blurs the background behind people in an image.", + "Inputs": [ + { + "Name": "image", + "Type": "File", + "Description": "The image to be filtered." + }, + { + "Name": "strength", + "Type": "Float", + "Description": "How much to blur the background (0.0 - 1.0). Default 0.5." + } + ], + "Outputs": [ + { + "Name": "success", + "Type": "Boolean", + "Description": "True if successful." + }, + { + "Name": "filtered_image", + "Type": "File", + "Description": "The filtered image." + } + ] + } + ] + } + } +} \ No newline at end of file diff --git a/src/AnalysisLayer/PortraitFilter/modulesettings.windows.development.json b/src/AnalysisLayer/PortraitFilter/modulesettings.windows.development.json new file mode 100644 index 00000000..ae0fc3c0 --- /dev/null +++ b/src/AnalysisLayer/PortraitFilter/modulesettings.windows.development.json @@ -0,0 +1,9 @@ +{ +/* + "Modules": { + "PortraitFilter": { + "FilePath": "PortraitFilter\\PortraitFilter.exe" + } + } + */ +} diff --git a/src/AnalysisLayer/PortraitFilter/runtimeconfig.template.json b/src/AnalysisLayer/PortraitFilter/runtimeconfig.template.json new file mode 100644 index 00000000..a90398fb --- /dev/null +++ b/src/AnalysisLayer/PortraitFilter/runtimeconfig.template.json @@ -0,0 +1,5 @@ +{ + "configProperties": { + "System.Drawing.EnableUnixSupport": true + } +} \ No newline at end of file diff --git a/src/AnalysisLayer/PortraitFilter/test.html b/src/AnalysisLayer/PortraitFilter/test.html new file mode 100644 index 00000000..61bcbf79 --- /dev/null +++ b/src/AnalysisLayer/PortraitFilter/test.html @@ -0,0 +1,143 @@ + + + + + + + + + Portrait Filter Test Page + + + + + + + +
+    Portrait Filter Test Page
+    <!-- form markup and page script elided -->
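Since the page's form and script markup is elided above, here is a minimal sketch of how such a
test page could call the new route declared in the PortraitFilter modulesettings.json. It assumes
the server's default port of 5000, multipart form fields named "image" and "strength" (matching
the route's declared inputs), that the returned filtered_image arrives as a base64 string, and a
hypothetical "result" img element on the page.

    async function submitPortraitFilter(file, strength) {
        // Build the multipart form the image/portraitfilter route expects:
        // an image file plus a background blur strength between 0.0 and 1.0.
        const formData = new FormData();
        formData.append("image", file);
        formData.append("strength", strength);

        const response = await fetch("http://localhost:5000/v1/image/portraitfilter", {
            method: "POST",
            body: formData
        });

        // On success the module replies with { success: true, filtered_image: ... }.
        const result = await response.json();
        if (result.success && result.filtered_image) {
            document.getElementById("result").src =
                "data:image/png;base64," + result.filtered_image;
        }
    }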
\ No newline at end of file
diff --git a/src/AnalysisLayer/SDK/ModelRunners/Yolov5Net.Scorer/Yolov5Net.Scorer.csproj b/src/AnalysisLayer/SDK/ModelRunners/Yolov5Net.Scorer/Yolov5Net.Scorer.csproj
index e40321f8..ff40e13b 100644
--- a/src/AnalysisLayer/SDK/ModelRunners/Yolov5Net.Scorer/Yolov5Net.Scorer.csproj
+++ b/src/AnalysisLayer/SDK/ModelRunners/Yolov5Net.Scorer/Yolov5Net.Scorer.csproj
@@ -30,8 +30,8 @@
- - + + diff --git a/src/API/Server/Backend/BackendRequests.cs b/src/AnalysisLayer/SDK/NET/CodeProject.SenseAI.AnalysisLayer.SDK/BackendRequests.cs similarity index 93% rename from src/API/Server/Backend/BackendRequests.cs rename to src/AnalysisLayer/SDK/NET/CodeProject.SenseAI.AnalysisLayer.SDK/BackendRequests.cs index c3d0a1bc..73083a26 100644 --- a/src/API/Server/Backend/BackendRequests.cs +++ b/src/AnalysisLayer/SDK/NET/CodeProject.SenseAI.AnalysisLayer.SDK/BackendRequests.cs @@ -2,7 +2,7 @@ using System.Collections.Generic; using System.Text.Json.Serialization; -namespace CodeProject.SenseAI.API.Server.Backend +namespace CodeProject.SenseAI.AnalysisLayer.SDK { #pragma warning disable IDE1006 // Naming Styles @@ -54,6 +54,11 @@ public class RequestPayload ///
public string? command { get; set; } + /// + /// Gets or sets the queue name. + /// + public string? queue { get; set; } + /// /// Gets or sets the set of key-value pairs passed by a client as part of a request. /// diff --git a/src/API/Server/Backend/BackendResponses.cs b/src/AnalysisLayer/SDK/NET/CodeProject.SenseAI.AnalysisLayer.SDK/BackendResponses.cs similarity index 63% rename from src/API/Server/Backend/BackendResponses.cs rename to src/AnalysisLayer/SDK/NET/CodeProject.SenseAI.AnalysisLayer.SDK/BackendResponses.cs index c05e7011..d33b25b8 100644 --- a/src/API/Server/Backend/BackendResponses.cs +++ b/src/AnalysisLayer/SDK/NET/CodeProject.SenseAI.AnalysisLayer.SDK/BackendResponses.cs @@ -1,4 +1,4 @@ -namespace CodeProject.SenseAI.API.Server.Backend +namespace CodeProject.SenseAI.AnalysisLayer.SDK { #pragma warning disable IDE1006 // Naming Styles public class BackendResponseBase @@ -50,6 +50,34 @@ public BackendSuccessResponse() success = true; } } + public class BoundingBoxPrediction + { + /// + /// Gets or sets the confidence in the detection response + /// + public float confidence { get; set; } + + /// + /// Gets or sets the lower y coordinate of the bounding box + /// + public int y_min { get; set; } + + /// + /// Gets or sets the lower x coordinate of the bounding box + /// + public int x_min { get; set; } + + /// + /// Gets or sets the upper y coordinate of the bounding box + /// + public int y_max { get; set; } + + /// + /// Gets or sets the upper x coordinate of the bounding box + /// + public int x_max { get; set; } + } + #pragma warning restore IDE1006 // Naming Styles } diff --git a/src/AnalysisLayer/SDK/NET/CodeProject.SenseAI.AnalysisLayer.SDK/CodeProject.SenseAI.AnalysisLayer.SDK.csproj b/src/AnalysisLayer/SDK/NET/CodeProject.SenseAI.AnalysisLayer.SDK/CodeProject.SenseAI.AnalysisLayer.SDK.csproj new file mode 100644 index 00000000..132c02c5 --- /dev/null +++ b/src/AnalysisLayer/SDK/NET/CodeProject.SenseAI.AnalysisLayer.SDK/CodeProject.SenseAI.AnalysisLayer.SDK.csproj @@ -0,0 +1,9 @@ + + + + net6.0 + enable + enable + + + diff --git a/src/AnalysisLayer/SDK/NET/CodeProject.SenseAI.AnalysisLayer.SDK/SenseAIClient.cs b/src/AnalysisLayer/SDK/NET/CodeProject.SenseAI.AnalysisLayer.SDK/SenseAIClient.cs new file mode 100644 index 00000000..ce63fa78 --- /dev/null +++ b/src/AnalysisLayer/SDK/NET/CodeProject.SenseAI.AnalysisLayer.SDK/SenseAIClient.cs @@ -0,0 +1,79 @@ + +using System.Text.Json; + +namespace CodeProject.SenseAI.AnalysisLayer.SDK +{ + public class SenseAIClient + { + private static HttpClient? _httpClient; + public SenseAIClient(string url, TimeSpan timeout = default) + { + _httpClient ??= new HttpClient + { + BaseAddress = new Uri(url), + Timeout = (timeout == default) ? TimeSpan.FromMinutes(1) : timeout + }; + } + + /// + /// Get a request from the SenseAI Server queue. + /// + /// The Queue Name. + /// The Id of the module making this request + /// A Cancellation Token. + /// The BackendRequest or Null if error + public async Task GetRequest(string queueName, string moduleId, + CancellationToken token = default) + { + // We're passing the moduleID as part of the GET request in order to give the server a + // hint that this module is alive and well. + BackendRequest? 
request = null; + var httpResponse = await _httpClient!.GetAsync($"v1/queue/{queueName}?moduleid={moduleId}", token) + .ConfigureAwait(false); + + if (httpResponse is not null && + httpResponse.StatusCode == System.Net.HttpStatusCode.OK) + { + var jsonString = await httpResponse.Content.ReadAsStringAsync(token) + .ConfigureAwait(false); + + request = JsonSerializer.Deserialize(jsonString, + new JsonSerializerOptions(JsonSerializerDefaults.Web)); + } + + return request; + } + + /// + /// Sends a response for a request to the SenseAI Server. + /// + /// The Request ID. + /// The Id of the module making this request + /// The content to send. + /// A Cancellation Token. + /// A Task. + public async Task SendResponse(string reqid, string moduleId, HttpContent content, + CancellationToken token) + { + await _httpClient!.PostAsync($"v1/queue/{reqid}?moduleid={moduleId}", content, token) + .ConfigureAwait(false); + } + + /// + /// Logs a message to the SenseAI Server. + /// + /// The Message. + /// A Cancellation Token. + /// A Task. + public async Task LogToServer(string message, CancellationToken token) + { + var form = new FormUrlEncodedContent(new[] + { new KeyValuePair("entry", message)} + ); + + /*var response = */ + await _httpClient!.PostAsync($"v1/log", form, token) + .ConfigureAwait(false); + } + } +} \ No newline at end of file diff --git a/src/AnalysisLayer/SDK/Python/requirements.txt b/src/AnalysisLayer/SDK/Python/requirements.txt new file mode 100644 index 00000000..40914a40 --- /dev/null +++ b/src/AnalysisLayer/SDK/Python/requirements.txt @@ -0,0 +1,5 @@ +#! Python3 + +## SenseAI +requests # Installing Requests, the HTTP library +Pillow # Installing Pillow, a Python Image Library \ No newline at end of file diff --git a/src/AnalysisLayer/TextSummary/senseAI.py b/src/AnalysisLayer/SDK/Python/senseAI.py similarity index 75% rename from src/AnalysisLayer/TextSummary/senseAI.py rename to src/AnalysisLayer/SDK/Python/senseAI.py index 542cf816..eb3a2c5e 100644 --- a/src/AnalysisLayer/TextSummary/senseAI.py +++ b/src/AnalysisLayer/SDK/Python/senseAI.py @@ -1,3 +1,5 @@ +## Trying to put this in a common place, but the import system is very strange. + import os import io import sys @@ -6,18 +8,18 @@ from datetime import datetime from enum import Flag, unique -# Add to the package search path the path of the packages within our local virtual environment -if sys.platform.startswith('linux'): - currentPythonDir = os.path.normpath(os.path.join(os.getcwd(), "../../bin/linux/python37")) -elif sys.platform.startswith('darwin'): - currentPythonDir = os.path.normpath(os.path.join(os.getcwd(), "../../bin/osx/python37")) -elif sys.platform.startswith('win'): - currentPythonDir = os.path.normpath(os.path.join(os.getcwd(), "..\\..\\bin\\win\\python37")) -else: - currentPythonDir = "" +# The purpose of inserting the path is so the Python import system looks in the right spot for packages. +# ie .../pythonXX/venv/Lib/site-packages. +# This depends on the VENV we're actually running in. So: get the location of the current exe +# and work from that. 
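+# (Note: the package path computed below matches a POSIX-style virtual environment layout,
+#  i.e. <venv>/bin/python alongside <venv>/lib/pythonX.Y/site-packages.)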
+# Get the location of the current python interpreter, and then add the site-packages associated +# with that interpreter to the PATH so python will find the packages we've installed +currentPythonDir = os.path.join(os.path.dirname(sys.executable)) if currentPythonDir != "": - sys.path.insert(0, currentPythonDir) + package_path = os.path.normpath(os.path.join(currentPythonDir, '../lib/python' + sys.version[:3] + '/site-packages/')) + sys.path.insert(0, package_path) + # print("Adding " + package_path + " to packages search path") import requests from PIL import Image @@ -47,16 +49,19 @@ class SenseAIBackend: Server """ - pythonDir = currentPythonDir - virtualEnv = os.getenv("VIRTUAL_ENV", f"{pythonDir}/venv") - errLog_APIkey = os.getenv("ERRLOG_APIKEY", "") - port = os.getenv("PORT", "5000") + pythonDir = currentPythonDir + virtualEnv = os.getenv("VIRTUAL_ENV", f"{pythonDir}/venv") + errLog_APIkey = os.getenv("ERRLOG_APIKEY", "") + port = os.getenv("PORT", "5000") + + moduleId = os.getenv("MODULE_ID", "senseAI") - errorPause = 1.0 - logTimingEvents = True + errorPause = 1.0 + logTimingEvents = True + verboseExceptionMsg = True - BaseQueueUrl = f"http://localhost:{port}/v1/queue/" - BaseLogUrl = f"http://localhost:{port}/v1/log/" + BaseQueueUrl = f"http://localhost:{port}/v1/queue/" + BaseLogUrl = f"http://localhost:{port}/v1/log/" requestSession = requests.Session() @@ -108,7 +113,7 @@ def getCommand(self, queueName : str) -> "list[str]": try: cmdTimer = self.startTimer(f"Getting Command from {queueName}") response = self.requestSession.get( - self.BaseQueueUrl + queueName, + self.BaseQueueUrl + queueName + "?moduleId=" + self.moduleId, timeout=30, verify=False ) @@ -122,8 +127,13 @@ def getCommand(self, queueName : str) -> "list[str]": return [] except Exception as ex: + + err_msg = "Error retrieving command: Is the API Server running?" + if verboseExceptionMsg: + err_msg = str(ex) + self.log(LogMethod.Error|LogMethod.Cloud, { - "message": "Error retrieving command: Is the API Server running?", + "message": err_msg, "method": "getCommand", "process": queueName, "file": "senseAI.py", @@ -144,36 +154,35 @@ def getImageFromRequest(self, req_data: JSON, index : int) -> Image: Param: index - the index of the image to return Returns: An image if succesful; None otherwise. 
""" - payload = req_data.get("payload", None) - if payload is None: - return None - - files = payload.get("files", None) - if files is None: - return None - - img_file = files.get(index, None) - if img_file is None: - return None - - img_dataB64 = img_file.get("data", None) - if img_dataB64 is None: - return None - + queue = "N/A" + payload = None try: + payload = req_data["payload"] + queueName = payload.get("queue","N/A") + files = payload["files"] + img_file = files[index] + img_dataB64 = img_file["data"] img_bytes = base64.b64decode(img_dataB64) img_stream = io.BytesIO(img_bytes) img = Image.open(img_stream).convert("RGB") + return img + except: - self.log(LogMethod.Error|LogMethod.Cloud, { - "message": "Unable to get image from request", + err_msg = "Unable to get image from request" + if verboseExceptionMsg: + err_msg = str(ex) + + self.log(LogMethod.Error|LogMethod.Server|LogMethod.Cloud, { + "message": err_msg, "method": "getImageFromRequest", "process": queueName, "file": "senseAI.py", "exception_type": "Exception" }) + return None + def getRequestImageCount(self, req_data: JSON) -> int: """ @@ -181,21 +190,18 @@ def getRequestImageCount(self, req_data: JSON) -> int: Param: req_data - the request data from the HTTP form Returns: The number of images if successful; 0 otherwise. """ + try: + # req_data is a dict + payload = req_data["payload"] + # payload is also a dict + files = payload["files"] + return len(files) - # req_data is a dict - payload = req_data.get("payload", None) - if payload is None: - return 0 - - # payload is also a dict - files = payload.get("files", None) - if files is None: + except Exception as ex: + if verboseExceptionMsg: + print(f"Error getting getRequestImageCount: {str(ex)}") return 0 - # files is a list - return len(files) - - def getRequestValue(self, req_data, key : str, defaultValue : str = None): """ Gets a value from the HTTP request Form send by the client @@ -207,16 +213,13 @@ def getRequestValue(self, req_data, key : str, defaultValue : str = None): unique IDs. The form will contain an array of values for the shared name. WE ONLY RETURN THE FIRST VALUE HERE. """ + + # self.log(LogMethod.Info, {"message": f"Getting request for module {self.moduleId}"}) + try: # req_data is a dict - payload = req_data.get("payload", None) - if payload is None: - return defaultValue - - # payload is also a dict - valueList = payload.get("values", None) - if valueList is None: - return defaultValue + payload = req_data["payload"] + valueList = payload["values"] # valueList is a list. Note that in a HTML form, each element may have multiple values for value in valueList: @@ -225,11 +228,12 @@ def getRequestValue(self, req_data, key : str, defaultValue : str = None): return defaultValue - except: + except Exception as ex: + if verboseExceptionMsg: + print(f"Error getting getRequestValue: {str(ex)}") return defaultValue def sendResponse(self, req_id : str, body : str) -> bool: - """ Sends the result of a comment to the analysis services back to the API server who will then pass this result back to the original calling client. 
SenseAI works on the basis of @@ -242,14 +246,14 @@ def sendResponse(self, req_id : str, body : str) -> bool: Returns True on success; False otherwise """ + # self.log(LogMethod.Info, {"message": f"Sending response for module {self.moduleId}"}) + success = False responseTimer = self.startTimer("Sending Response") try: - # self.log(LogMethod.Info, {"message": f"Sending response for id: {req_id}"}) - self.requestSession.post( - self.BaseQueueUrl + req_id, + self.BaseQueueUrl + req_id + "?moduleId=" + self.moduleId, data = body, timeout=1, verify=False) @@ -258,8 +262,11 @@ def sendResponse(self, req_id : str, body : str) -> bool: except Exception as ex: time.sleep(self.errorPause) - print(f"Error sending response: {str(ex)}") - # print(f"Error sending response: Is the API Server running?") + + if verboseExceptionMsg: + print(f"Error sending response: {str(ex)}") + else: + print(f"Error sending response: Is the API Server running?") finally: if success: @@ -337,8 +344,10 @@ def _serverLog(self, entry : str) -> bool: return True except Exception as ex: - # print(f"Error posting log: {str(ex)}") - print(f"Error posting log: Is the API Server running?") + if verboseExceptionMsg: + print(f"Error posting log: {str(ex)}") + else: + print(f"Error posting log: Is the API Server running?") return False @@ -347,7 +356,7 @@ def _cloudLog(self, process: str, method: str, file: str, message: str, exceptio Logs an error to our remote logging server (errLog.io) Param: process - The name of the current process Param: method - The name of the current method - Param: file- The name of the current file + Param: file - The name of the current file Param: message - The message to log Param: exception_type - The exception type if this logging is the result of an exception """ @@ -372,6 +381,13 @@ def _cloudLog(self, process: str, method: str, file: str, message: str, exceptio # print "Json Data: ", data headers = {'Content-Type': 'application/json','Accept': 'application/json'} - response = requests.post(url, data = obj, headers = headers) + try: + response = requests.post(url, data = obj, headers = headers) + except Exception as ex: + if verboseExceptionMsg: + print(f"Error posting server log: {str(ex)}") + else: + print(f"Error posting server log: Do you have interwebz?") + return False return response.status_code == 200 \ No newline at end of file diff --git a/src/AnalysisLayer/TextSummary/TextSummary.pyproj b/src/AnalysisLayer/TextSummary/TextSummary.pyproj index e8a28673..74f6b767 100644 --- a/src/AnalysisLayer/TextSummary/TextSummary.pyproj +++ b/src/AnalysisLayer/TextSummary/TextSummary.pyproj @@ -6,12 +6,13 @@ {470d3417-36a4-49a4-b719-496466fa92fb} textsummary.py - + ..\SDK\Python . . 
{888888a0-9f3d-457c-b088-3a5042f75d52} Standard Python launcher MSBuild|env|$(MSBuildProjectFullPath) + False @@ -19,10 +20,11 @@ 10.0 + + - diff --git a/src/AnalysisLayer/TextSummary/__init__.py b/src/AnalysisLayer/TextSummary/__init__.py index 139597f9..8b137891 100644 --- a/src/AnalysisLayer/TextSummary/__init__.py +++ b/src/AnalysisLayer/TextSummary/__init__.py @@ -1,2 +1 @@ - diff --git a/src/AnalysisLayer/TextSummary/modulesettings.json b/src/AnalysisLayer/TextSummary/modulesettings.json new file mode 100644 index 00000000..7021c1c9 --- /dev/null +++ b/src/AnalysisLayer/TextSummary/modulesettings.json @@ -0,0 +1,49 @@ +{ + "Modules": { + "TextSummary": { + "Name": "Text Summary", + "Activate": true, + "Description": "Summarizes some content by selecting a number of sentences that are most representitive of the content.", + "FilePath": "TextSummary\\textsummary.py", + "Runtime": "python38", + "Platforms": [ "windows", "linux", "macos", "docker" ], + + "EnvironmentVariables": { + "NLTK_DATA": "%MODULES_PATH%\\TextSummary\\nltk_data" + }, + + "RouteMaps": [ + { + "Path": "text/summarize", + "Queue": "summary_queue", + "Command": "summarize", + "Description": "Summarizes some content by selecting a number of sentences that are most representitive of the content.", + "Inputs": [ + { + "Name": "text", + "Type": "Text", + "Description": "The text to be summarized" + }, + { + "Name": "num_sentences", + "Type": "Integer", + "Description": "The number of sentences to produce." + } + ], + "Outputs": [ + { + "Name": "success", + "Type": "Boolean", + "Description": "True if successful." + }, + { + "Name": "summary", + "Type": "Text", + "Description": "The summarized text." + } + ] + } + ] + } + } +} diff --git a/src/AnalysisLayer/TextSummary/modulesettings.windows.json b/src/AnalysisLayer/TextSummary/modulesettings.windows.json new file mode 100644 index 00000000..40667dbf --- /dev/null +++ b/src/AnalysisLayer/TextSummary/modulesettings.windows.json @@ -0,0 +1,9 @@ +{ + // We don't have a Python3.8 installer for windows, so let's use the existing Python3.7 that + // we know works + "Modules": { + "TextSummary": { + "Runtime": "python37" + } + } +} diff --git a/src/AnalysisLayer/TextSummary/requirements.txt b/src/AnalysisLayer/TextSummary/requirements.txt index e367a93d..fa915f32 100644 --- a/src/AnalysisLayer/TextSummary/requirements.txt +++ b/src/AnalysisLayer/TextSummary/requirements.txt @@ -1,10 +1,16 @@ #! Python3.7 ## SenseAI -requests # Installing Requests, the HTTP library -Pillow # Installing Pillow, a Python Image Library +requests # Installing Requests, the HTTP library +Pillow # Installing Pillow, a Python Image Library -nltk # Installing NLTK, the Natural Language Toolkit -numpy # Installing NumPy, the fundamental package for array computing with Python. -networkx # Installing NetworkX,a package for creating and manipulating graphs and networks -SciPy # Installing SciPy, a library for mathematics, science, and engineering +## NOTE: We don't really care about numpy here, BUT the bgrem module is a little picky. So we can +## stick to the lower version of numpy instead of the 1.22.3 that currently gets installed. +## However: this may not be needed if we're using pip -r requirements: it may figure out the +## issues for us. +## numpy==1.21.6 # Installing NumPy, the fundamental package for array computing with Python. 
+numpy + +nltk # Installing NLTK, the Natural Language Toolkit +networkx # Installing NetworkX, a package for creating and manipulating graphs and networks +SciPy # Installing SciPy, a library for mathematics, science, and engineering \ No newline at end of file diff --git a/src/AnalysisLayer/TextSummary/summarize.py b/src/AnalysisLayer/TextSummary/summarize.py index c8054277..852935c3 100644 --- a/src/AnalysisLayer/TextSummary/summarize.py +++ b/src/AnalysisLayer/TextSummary/summarize.py @@ -33,7 +33,7 @@ def read_article(self, file_name: str): # print(sentence) sentences.append(sentence.replace("[^a-zA-Z]", " ").split(" ")) - sentences.pop() + sentences.pop() return sentences @@ -53,7 +53,7 @@ def sentence_similarity(self, sent1:str, sent2:str): # build the vector for the second sentence for w in sent2: - vector2[all_words.index(w)] += 1 + vector2[all_words.index(w)] += 1 return 1 - cosine_distance(vector1, vector2) @@ -61,9 +61,8 @@ def remove_stop_words(self, sentences, stopwords:[str]=None): if stopwords is None: stopwords = [] stripped_sentences = [] - for sentence_index in range(len(sentences)): + for sentence in sentences: stripped_sentence = [] - sentence = sentences[sentence_index]; for word in sentence: # we want to ignore case when comparing the sentences @@ -76,7 +75,7 @@ def remove_stop_words(self, sentences, stopwords:[str]=None): stripped_sentences.append(stripped_sentence) return stripped_sentences - + def build_similarity_matrix(self, sentences, stop_words): # Remove the stop words once so we don't have to check # when evaluating each sentence multiple times. @@ -84,7 +83,7 @@ def build_similarity_matrix(self, sentences, stop_words): # Create an empty similarity matrix similarity_matrix = np.zeros((len(stripped_sentences), len(stripped_sentences))) - + # Optimize calculation as similarity(a,b) == similarity(b,a) for idx1 in range(len(stripped_sentences)): for idx2 in range(idx1, len(stripped_sentences)): @@ -92,7 +91,7 @@ def build_similarity_matrix(self, sentences, stop_words): continue similarity = self.sentence_similarity(stripped_sentences[idx1],stripped_sentences[idx2]) - similarity_matrix[idx1][idx2] = similarity + similarity_matrix[idx1][idx2] = similarity similarity_matrix[idx2][idx1] = similarity return similarity_matrix @@ -116,11 +115,11 @@ def generate_summary(self, sentences, top_n: int = 5): scores = nx.pagerank(sentence_similarity_graph) # Step 4 - Sort the rank and pick top sentences. Result is array of [rank, sentence] - ranked_sentence = sorted(((scores[i],s) for i,s in enumerate(sentences)), reverse=True) - #print("Indexes of top ranked_sentence order are ", ranked_sentence) + ranked_sentence = sorted(((scores[i],s) for i,s in enumerate(sentences)), reverse=True) + #print("Indexes of top ranked_sentence order are ", ranked_sentence) for i in range(top_n): - summarize_text.append(" ".join(ranked_sentence[i][1])) + summarize_text.append(" ".join(ranked_sentence[i][1])) # Step 5 - Output the summarize text summary = ". ".join(summarize_text) @@ -146,7 +145,7 @@ def generate_summary_from_text(self, text, top_n=5): # Step 1 - Split text into paragraphs paragraphs = text.split("\n") - + # Step 2 - Split paragraphs into sentences for paragraph in paragraphs: sublines = paragraph.split(". ") # sentences, really. 
@@ -157,5 +156,5 @@ def generate_summary_from_text(self, text, top_n=5): sentences.append(sentence) print("Number of sentences = ", len(sentences)) - + return self.generate_summary(sentences, top_n) diff --git a/src/AnalysisLayer/TextSummary/textsummary.py b/src/AnalysisLayer/TextSummary/textsummary.py index c3f0c9e3..2444cb65 100644 --- a/src/AnalysisLayer/TextSummary/textsummary.py +++ b/src/AnalysisLayer/TextSummary/textsummary.py @@ -1,6 +1,7 @@ #!/usr/bin/env python # coding: utf-8 - +import sys +sys.path.append("../SDK/Python") from senseAI import SenseAIBackend, LogMethod from summarize import Summarize @@ -16,9 +17,9 @@ def textsummary(thread_name): summary = Summarize() while True: - queue_entries: list[str] = senseAI.getCommand(TEXT_QUEUE); + queue_entries: list = senseAI.getCommand(TEXT_QUEUE) - if len(queue_entries) > 0: + if len(queue_entries) > 0: timer: tuple = senseAI.startTimer("Text Summary") for queue_entry in queue_entries: @@ -46,10 +47,10 @@ def textsummary(thread_name): # summaryText = summary.generate_summary_from_file(file_path, num_sentences) # If we're passing a file itself (generate_summary_from_textfile to be added) - # summaryText = summary.generate_summary_from_textfile(text_file, num_sentences) + # summaryText = summary.generate_summary_from_textfile(text_file, num_sentences) #print("Will summarize the text: ", req_text); - summaryText: str = summary.generate_summary_from_text(req_text, num_sentences) + summaryText: str = summary.generate_summary_from_text(req_text, num_sentences) output = {"success": True, "summary": summaryText} @@ -59,10 +60,10 @@ def textsummary(thread_name): output = {"success": False, "error": "unable to summarize", "code": 500} senseAI.log(LogMethod.Error | LogMethod.Cloud | LogMethod.Server, - { "process": "textsummary", + { "process": "textsummary", "file": "textsummary.py", "method": "textsummary", - "message": err_trace, + "message": err_trace, "exception_type": "Exception"}) finally: @@ -72,11 +73,11 @@ def textsummary(thread_name): senseAI.sendResponse(req_id, json.dumps(output)) except Exception: print("An exception occured") - + # if os.path.exists(file_path): # os.remove(file_path) if __name__ == "__main__": senseAI.log(LogMethod.Info | LogMethod.Server, {"message":"TextSummary module started."}) - textsummary("main_textsummary") \ No newline at end of file + textsummary("main_textsummary") diff --git a/src/AnalysisLayer/start-analysis.sh b/src/AnalysisLayer/start-analysis.sh index 1186cbaa..6eadb8cf 100644 --- a/src/AnalysisLayer/start-analysis.sh +++ b/src/AnalysisLayer/start-analysis.sh @@ -11,7 +11,7 @@ verbosity="info" # Get platform if [[ $OSTYPE == 'darwin'* ]]; then - platform="osx" + platform="macos" else platform="linux" fi diff --git a/src/clean.bat b/src/clean.bat new file mode 100644 index 00000000..9944b1c0 --- /dev/null +++ b/src/clean.bat @@ -0,0 +1,96 @@ +:: CodeProject SenseAI Server and Analysis modules: Cleans debris, properly, for clean build +:: +:: Usage: +:: clean_for_build.bat +:: +:: We assume we're in the /src directory + +@echo off +cls +setlocal enabledelayedexpansion + +set cleanBuild=false +set cleanInstallLocal=false +set cleanInstallAll=false + +if /i "%1" == "build" set cleanBuild=true +if /i "%1" == "install" set cleanInstallLocal=true +if /i "%1" == "installall" set cleanInstallAll=true + +if /i "%cleanInstallAll%" == "true" set cleanInstallLocal=true +if /i "%cleanInstallLocal%" == "true" set cleanBuild=true + +if /i "%cleanBuild%" == "true" ( + + echo. + echo Cleaning Build + echo. 
+ + call :CleanSubDirs "." "bin" "AnalysisLayer\bin" + call :CleanSubDirs "." "obj" "AnalysisLayer\bin" + call :CleanSubDirs "..\Installers" "bin" + call :CleanSubDirs "..\Installers" "obj" + call :CleanSubDirs "..\demos" "bin" + call :CleanSubDirs "..\demos" "obj" + call :CleanSubDirs "..\tests" "bin" + call :CleanSubDirs "..\tests" "obj" +) + +if /i "%cleanInstallLocal%" == "true" ( + + echo. + echo Cleaning Windows install + echo. + + call :CleanSubDirs "AnalysisLayer\bin" "windows" "linux" + call :CleanSubDirs "AnalysisLayer\BackgroundRemover" "models" + call :CleanSubDirs "AnalysisLayer\CodeProject.SenseAI.AnalysisLayer.Yolo" "assets" + call :CleanSubDirs "AnalysisLayer\DeepStack" "assets" + call :CleanSubDirs "AnalysisLayer\DeepStack" "datastore" + call :CleanSubDirs "AnalysisLayer\DeepStack" "tempstore" +) + +if /i "%cleanInstallAll%" == "true" ( + + echo. + echo Cleaning install for other platforms + echo. + + call :CleanSubDirs "AnalysisLayer" "bin" +) + +goto:eof + + +:CleanSubDirs + SetLocal EnableDelayedExpansion + + set BasePath=%~1 + pushd !BasePath! + + set DirPattern=%~2 + set ExcludeDirPattern=%~3 + + REM Loop through all subdirs recursively + for /d /r %%i in (*!DirPattern!*) do ( + + set dirName=%%i + + REM Check for exclusions + set remove=true + if not "!ExcludeDirPattern!" == "" ( + if not "!dirName:%ExcludeDirPattern%=!" == "!dirName!" set remove=false + ) + + REM Do the deed + if /i "!remove!" == "true" ( + @rmdir /s /q "%%i" + echo Removed !dirName! + ) else ( + REM echo Not removing !dirName! + ) + ) + + popd + + exit /b diff --git a/tests/QueueServiceTests/QueueProcessing.cs b/tests/QueueServiceTests/QueueProcessing.cs index 4d9acb22..0d736cd7 100644 --- a/tests/QueueServiceTests/QueueProcessing.cs +++ b/tests/QueueServiceTests/QueueProcessing.cs @@ -7,6 +7,7 @@ using System.Threading.Tasks; using CodeProject.SenseAI.API.Server.Backend; +using CodeProject.SenseAI.AnalysisLayer.SDK; using Xunit; namespace QueueServiceTests @@ -24,17 +25,17 @@ public class TestQueuedResponse : BackendResponseBase } private const string QueueName = "testQueue"; - private class TestOptions : IOptions + private class TestOptions : IOptions { - public TestOptions(BackendOptions options) + public TestOptions(QueueProcessingOptions options) { Value = options; } - public BackendOptions Value { get; } + public QueueProcessingOptions Value { get; } } - private static readonly BackendOptions queueOptions = new() + private static readonly QueueProcessingOptions queueOptions = new() { MaxQueueLength = 10, ResponseTimeout = TimeSpan.FromSeconds(10)