From cdd7ab2b70f4c72a02ae1682d5c4485e954fb3c2 Mon Sep 17 00:00:00 2001 From: Bikram Chatterjee Date: Thu, 9 Apr 2020 18:02:26 +0200 Subject: [PATCH 01/72] dperl first integration --- priv/dashboard_scripts/dperl_dashboard.js | 1033 +++++++++++++++++++++ rebar.config | 2 +- src/dderl_sup.erl | 22 +- src/dperl.hrl | 316 +++++++ src/dperl_cp.erl | 287 ++++++ src/dperl_dal.erl | 727 +++++++++++++++ src/dperl_metrics.erl | 396 ++++++++ src/dperl_ora.erl | 703 ++++++++++++++ src/dperl_ora.hrl | 215 +++++ src/dperl_skvh_copy.erl | 255 +++++ src/dperl_status.hrl | 57 ++ src/dperl_status_agr.erl | 740 +++++++++++++++ src/dperl_status_pre.erl | 100 ++ src/dperl_status_pull.erl | 517 +++++++++++ src/dperl_status_push.erl | 151 +++ src/dperl_strategy_scr.erl | 875 +++++++++++++++++ src/dperl_sup.erl | 93 ++ src/dperl_worker.erl | 308 ++++++ src/dperl_worker_sup.erl | 31 + 19 files changed, 6820 insertions(+), 8 deletions(-) create mode 100644 priv/dashboard_scripts/dperl_dashboard.js create mode 100644 src/dperl.hrl create mode 100644 src/dperl_cp.erl create mode 100644 src/dperl_dal.erl create mode 100644 src/dperl_metrics.erl create mode 100644 src/dperl_ora.erl create mode 100644 src/dperl_ora.hrl create mode 100644 src/dperl_skvh_copy.erl create mode 100644 src/dperl_status.hrl create mode 100644 src/dperl_status_agr.erl create mode 100644 src/dperl_status_pre.erl create mode 100644 src/dperl_status_pull.erl create mode 100644 src/dperl_status_push.erl create mode 100644 src/dperl_strategy_scr.erl create mode 100644 src/dperl_sup.erl create mode 100644 src/dperl_worker.erl create mode 100644 src/dperl_worker_sup.erl diff --git a/priv/dashboard_scripts/dperl_dashboard.js b/priv/dashboard_scripts/dperl_dashboard.js new file mode 100644 index 00000000..0d164dd3 --- /dev/null +++ b/priv/dashboard_scripts/dperl_dashboard.js @@ -0,0 +1,1033 @@ +function init(container, width, height) { + "use strict"; + // This code is executed once and it should initialize the graph. 
+ +// Dashboard main view, should be named: "cpro_dashboard": +/* + +select + ckey + , + cvalue + , + chash + from + CPROS + where + safe_string + ( + hd(ckey) + ) + <> + to_string('register') + and + safe_string + ( + nth(4,ckey) + ) + <> + to_string('focus') +*/ + + +// Focus sql view, should be named "cpro_dashboard_focus": +/* + +select + ckey + , + cvalue + , + chash + from + CPROS + where + safe_string + ( + hd(ckey) + ) + <> + to_string('register') + and + safe_string + ( + nth(4,ckey) + ) + <> + to_string('focus') + or + safe_list + ( + nth(6,ckey) + ) + = + list + ( + to_string(:binstr_focus_name) + , + to_string(:binstr_focus_key) + ) +*/ + +// Error drill down view, should be named "cpro_dashboard_drill_error": +/* + +select + ckey + , + cvalue + , + chash + from + cpro.cproJobError + where + safe_string + ( + hd(ckey) + ) + = + to_string(:binstr_jobname) +*/ + +// Channel drill down view, should be named "cpro_dashboard_drill_channel": +/* + +select + ckey + , + cvalue + , + chash + from + :binstr_channel + +*/ + + /** Size & positioning parameters */ + + // virtual coordinates drawing arc radius + var vArcRadius = 1000; + // node radius in virtual coordinates + var nradius = 100; + var animDuration = 500; + + // Focusname or topic for subscriptions + var topic = "shortid"; + var viewName = "cpro_dashboard"; + var focusSuffix = "_focus"; + + var initGraph = function() { + return { + links: {}, + nodes: {}, + status: {}, + errors: {}, + channelIdMap: {}, + focusList: {}, + jobIdChannelMap: {}, + center: { + // Position relative to the bottom center after margin. 
+ CPROS: { + position: { x: -1.9 * nradius, y: 0.3 * nradius }, + status: 'idle', + system_information: {} + }, + CPROP: { + position: { x: 0, y: -nradius }, + status: 'idle', + system_information: {} + } + } + }; + } + + /** Helper functions for data extraction */ + var parseError = function(term) { + return new Error(term + " is not a valid json term"); + } + var getKey = function(row) { + var k = []; + try { + k = JSON.parse(row.ckey_1); + } catch (e) { + throw parseError(row.ckey_1); + } + return k; + }; + + var getValue = function(row) { + var v = {}; + try { + v = JSON.parse(row.cvalue_2); + } catch (e) { + throw parseError(row.cvalue_2); + } + return v; + }; + + function isNodeRunning(nodeId, links) { + var jobs; + for(var lid in links) { + if(links[lid].target === nodeId) { + jobs = links[lid].jobs; + for(var jId in jobs) { + if(jobs[jId].running === true) { + return true; + } + } + } + } + return false; + } + + var extractLinksNodes = function(rows, graph) { + var links = graph.links; + var nodes = graph.nodes; + var status = graph.status; + var center = graph.center; + var errors = graph.errors; + var channelIdMap = graph.channelIdMap; + var focusList = graph.focusList; + var jobIdChannelMap = graph.jobIdChannelMap; + + rows.forEach(function(row) { + var key = getKey(row); + var value = getValue(row); + + // All keys we are interested should have 7 elements. + if(key.length !== 7) { + // Discard any other rows. 
+ return; + } + + if(key[5] === "job_status") { + var triangleId = key[2] + '_' + key[3] + '_' + key[4]; + var jobId = key[2] + '_' + key[3]; + if (row.op === "del") { + delete status[triangleId]; + } else { + status[triangleId] = { + id: triangleId, + job: jobId, + status: value.status + }; + } + } else if(key[5] == "error" && key[3] == "system_info") { + if (row.op === "del") { + delete center[key[2]][key[6] + "_error"]; + var still_errors = false; + var centerOtherKeys = Object.keys(center[key[2]]); + for(var kIdx = 0; kIdx < centerOtherKeys.length; ++kIdx) { + if(centerOtherKeys[kIdx].includes("_error")) { + still_errors = true; + break; + } + } + if (!still_errors) { + center[key[2]].status = "idle"; + } + } else { + console.log("Setting status error"); + center[key[2]].status = "error"; + center[key[2]][key[6] + "_error"] = value; + } + } else if(key[5] === "jobs") { + var nodeId = value.platform; + var desc = value.desc || ""; + if(value.direction !== "pull") { + channelIdMap[value.channel] = nodeId; + } + if(!center.hasOwnProperty(nodeId)) { + if(!nodes.hasOwnProperty('nodeId')) { + nodes[nodeId] = { + id: nodeId, + status: "stopped", + desc: desc + }; + } else { + nodes[nodeId].desc = desc; + } + } + var linkId = key[2] + '_' + nodeId; + var jobId = key[2] + '_' + key[3]; + jobIdChannelMap[jobId] = value.channel; + var jobs = {}; + if(links.hasOwnProperty(linkId)) { + jobs = links[linkId].jobs; + } + jobs[jobId] = { + id: jobId, + legend: key[3], + enabled: value.enabled, + running: value.running, + direction: value.direction + }; + var runningLink = false; + for(var jId in jobs) { + if(jobs[jId].running === true) { + runningLink = true; + break; + } + } + links[linkId] = { + id: linkId, + source: key[2], + target: nodeId, + running: runningLink, + jobs: jobs + }; + if(isNodeRunning(nodeId, links) && nodes[nodeId].status === "stopped") { + nodes[nodeId].status = "idle"; + } + } else if(key[3] === "focus") { + var channel = key[6][0]; + var focusValueKey = 
key[6][1]; + if (typeof key[6][1].join === 'function') { + focusValueKey = key[6][1].join("_"); + } + if(!focusList.hasOwnProperty(channel)) { + focusList[channel] = {}; + } + focusList[channel][focusValueKey] = value; + } else if(key[5] === "error") { + var jobId = key[2] + '_' + key[3]; + if (row.op === "del" && errors.hasOwnProperty(jobId)) { + if(key[3] === key[6]) { + errors[jobId].details = ""; + } else { + var idx = errors[jobId].ids.indexOf(key[6].toString()); + if(idx > -1) { + errors[jobId].ids.splice(idx, 1); + } + } + if(errors[jobId].ids.length === 0 && !errors[jobId].details) { + delete errors[jobId]; + } + } else { + if(!errors.hasOwnProperty(jobId)) { + errors[jobId] = { + details: "", + ids: [] + }; + } + if(key[3] === key[6]) { + errors[jobId].details = value; + } else { + if(errors[jobId].ids.indexOf(key[6].toString()) === -1) { + errors[jobId].ids.push(key[6].toString()); + } + } + } + } else if(key[5] === "system_info") { + var centerId = key[2]; + var systemIp = key[4]; + if(center[centerId]) { + if(key[6] === "node_status" && center[centerId].system_information) { + var systemInfo = {}; + for(var k in value) { + systemInfo[k] = value[k]; + } + center[centerId].system_information[systemIp] = systemInfo; + } else if(key[6] === "heartbeat") { + var collectorName = key[0]; + if(row.op !== "del" && (value.cpu_overload_count !== 0 || value.memory_overload_count !== 0 || value.eval_crash_count !== 0)) { + center[centerId].status = "error"; + var ovErr = {}; + if(!center[centerId].overload_error) { + ovErr[collectorName] = {}; + ovErr[collectorName][systemIp] = value; + center[centerId].overload_error = ovErr; + } else if (!center[centerId].overload_error[collectorName]) { + ovErr[systemIp] = value; + center[centerId].overload_error[collectorName] = ovErr; + } else { + center[centerId].overload_error[collectorName][systemIp] = value; + } + } else { + // TODO: Is there a simple way of doing this ? 
+ if(center[centerId].overload_error && center[centerId].overload_error[collectorName] && center[centerId].overload_error[collectorName][systemIp]) { + delete center[centerId].overload_error[collectorName][systemIp]; + if(Object.keys(center[centerId].overload_error[collectorName]).length === 0) { + delete center[centerId].overload_error[collectorName]; + if(Object.keys(center[centerId].overload_error).length === 0) { + delete center[centerId].overload_error; + var still_errors = false; + var centerOtherKeys = Object.keys(center[centerId]); + for(var kIdx = 0; kIdx < centerOtherKeys.length; ++kIdx) { + if(centerOtherKeys[kIdx].includes("_error")) { + still_errors = true; + break; + } + } + if (!still_errors) { + center[centerId].status = "idle"; + } + } + } + } + } + } + } + } + }); + + // Reset the status as errors could have been deleted + for(var nid in nodes) { + if(nodes[nid].status === "error" || (isFocus && nodes[nid].running_status !== undefined)) { + nodes[nid].status = nodes[nid].running_status; + } + } + + console.log("the errors", errors); + for(var lid in links) { + var linknode = links[lid].target; + if(nodes.hasOwnProperty(linknode)) { + for(var jId in links[lid].jobs) { + if(errors.hasOwnProperty(jId)) { + nodes[linknode][jId + "_error"] = errors[jId]; + if(nodes[linknode].status !== "error") { + nodes[linknode].running_status = nodes[linknode].status; + nodes[linknode].status = "error"; + } + } else { + delete nodes[linknode][jId + "_error"]; + } + } + } else { + console.log("link without node found", linknode); + } + } + + // Grey out all the platforms before appending focus data + if(isFocus) { + for(nid in nodes) { + if(nodes[nid].status !== "error") { + nodes[nid].running_status = nodes[nid].status; + nodes[nid].status = "stopped"; + } + } + } + + // Appending focus to the correct platform + console.log("The focus list", focusList); + console.log("the channel id map", channelIdMap); + for(var channel in channelIdMap) { + 
if(focusList.hasOwnProperty(channel)) { + var nid = channelIdMap[channel]; + if(!nodes[nid].status !== "error") { + nodes[nid].status = nodes[nid].running_status; + } + for(var focusValueKey in focusList[channel]) { + nodes[nid][focusValueKey] = focusList[channel][focusValueKey]; + } + } + } + + for(var sid in status) { + status[sid].channel = jobIdChannelMap[status[sid].job]; + } + + return {links: links, nodes: nodes, status: status, center: center, errors: errors, channelIdMap: channelIdMap, focusList: focusList, jobIdChannelMap: jobIdChannelMap}; + }; + /** End data extraction functions */ + + // Add the focus input + var focusDiv = container + .append('div') + .style('position', 'absolute') + .style("margin", "10px 0px 0px 10px"); + + var inputId = "shortid_" + Math.random().toString(36).substr(2, 14); + + var label = focusDiv + .append('label') + .attr("for", inputId) + .text("Shortid: "); + + var focusInput = focusDiv + .append('input') + .attr("id", inputId) + .style("border", "solid 1px black") + .style("border-radius", "3px") + .style("padding", "1px 0px 1px 4px"); + + var graph = initGraph(); + var firstData = true; + var isFocus = false; + + focusInput.on("keypress", function() { + if(d3.event.keyCode == 13) { + var inp = focusInput.node(); + console.log("the value", inp.value); + // TODO: Do not call if it is the same registration again ? or maybe for refresh ? 
+ var params = { + ':binstr_focus_name' : {typ: "binstr", val: topic}, + ':binstr_focus_key' : {typ: "binstr", val: inp.value} + } + helper.req(viewName, focusSuffix, topic, inp.value, params, function() { + svg.selectAll('svg > *').remove(); + graph = initGraph(); + firstData = true; + if (inp.value) { + isFocus = true; + } else { + isFocus = false; + } + }); + } + }) + + var margin = { top: 10, right: 10, bottom: 10, left: 10 }; // physical margins in px + + var colorStatus = { + idle: 'green', + error: 'red', + synced: 'yellow', + cleaning: 'lightsteelblue', + cleaned: 'blue', + refreshing: 'cornflowerblue', + refreshed: 'purple', + stopped: 'lightgrey' + }; + // To see the complete circle when drawing negative coordinates + // and width and height for the virtual coordinates + var vBox = { + x: -1 * (vArcRadius + nradius), + y: -1 * (vArcRadius + nradius), + w: vArcRadius * 2 + nradius * 2, + h: vArcRadius + 3 * nradius + }; + + var svg = container + .append('svg') + .attr('viewBox', vBox.x + ' ' + vBox.y + ' ' + vBox.w + ' ' + vBox.h) + .attr('preserveAspectRatio', 'xMidYMax meet') + .style('margin-top', margin.top + 'px') + .style('margin-right', margin.right + 'px') + .style('margin-bottom', margin.bottom + 'px') + .style('margin-left', margin.left + 'px'); + + function resize(w, h) { + var cheight = h - (margin.top + margin.bottom); + var cwidth = w - (margin.left + margin.right); + svg.attr('width', cwidth) + .attr('height', cheight); + } + + resize(width, height); + + var tooltipDiv = d3.select("body").append('div') + .styles({ + position: "absolute", + "text-align": "left", + padding: "2px", + font: "12px courier", + border: "0px", + "border-radius": "8px", + "pointer-events": "none", + opacity: 0, + "z-index": 99996, + "background-color": "lightgrey", + "max-width": "calc(100% - 10px)", + "max-height": "calc(100% - 40px)", + overflow: "hidden" + }); + + function tooltipStringifyFilter(name, val) { + if(name === "position"){ + return undefined; + } + 
return val; + } + + function showTooltip(d) { + var html = formatJSON(JSON.stringify(d, tooltipStringifyFilter, 2), true); + apply_transition(tooltipDiv.html(html), 200).style('opacity', 0.95); + } + + function formatJSON(json, preformatted) { + json = json.replace(/&/g, '&').replace(//g, '>'); + var result = json.replace(/("(\\u[a-zA-Z0-9]{4}|\\[^u]|[^\\"])*"(\s*:)?|\b(true|false|null)\b|-?\d+(?:\.\d*)?(?:[eE][+\-]?\d+)?)/g, function (match) { + var color = 'brown'; // number + if (/^"/.test(match)) { + if (/:$/.test(match)) { + match = match.slice(1, -2) + ":"; + color = 'blue'; // key + } else { + color = 'green'; // string + } + } else if (/true|false/.test(match)) { + color = 'magenta'; // boolean + } else if (/null/.test(match)) { + color = 'red'; //null + } + return '' + match + ''; + }); + if(preformatted) { + result = "
" + result + "
"; + } + return result; + } + + function moveTooltip() { + // Position the tooltip without letting it go outside the window. + var availableHeight = document.documentElement.clientHeight; + var availableWidth = document.documentElement.clientWidth; + + var d = tooltipDiv.node(); + var tooltipHeight = d.scrollHeight; + var tooltipWidth = d.scrollWidth; + + var left = d3.event.pageX + 15; + if(left + tooltipWidth + 5 > availableWidth) { + left = Math.max(availableWidth-tooltipWidth-5, 5); + } + var top = d3.event.pageY + 15; + if(top + tooltipHeight + 5 > availableHeight) { + top = Math.max(availableHeight-tooltipHeight-5, 30); + } + + tooltipDiv + .style('left', left + "px") + .style('top', top + "px"); + } + + function hideTooltip() { + apply_transition(tooltipDiv, animDuration).style('opacity', 0); + } + + function openErrorView(d) { + var found = false; + var drillKey = ""; + var sp; + for(var k in d) { + sp = k.split("_"); + if(sp[2] === "error") { + drillKey = sp[1]; + found = true; + break; + } + } + if(found) { + helper.browse('cpro_dashboard_drill_error', { + ':binstr_jobname' : {typ: "binstr", val: drillKey} + }); + } + } + + function openChannelView(d) { + if(d.channel) { + helper.browse('cpro_dashboard_drill_channel', { + ':binstr_channel': {typ: "binstr", val: d.channel} + }); + } + } + + function entries(obj) { + var res = []; + for(var k in obj) { + obj[k].id = k; + res.push(obj[k]); + } + return res; + } + + function splitText(text) { + var text = text.split(":").pop(); + return text.split(/\s+/); + /* Disabled until we have grouping + if(text.length < 10) { + return [text]; + } else { + + var words = text.split(/\s+/); + var lines = [words[0]]; + var maxLengthLine = 8; + // TODO: try to do this dynamic, lines on the top are smaller than lines on the bottom, how can we divide the lines to represent that ? 
+ for(var i = 1, j = 0; i < words.length; ++i) { + if(lines[j].length + words[i].length <= maxLengthLine) { + lines[j] += ' ' + words[i]; + } else { + j += 1; + lines[j] = words[i]; + } + } + return lines; + } + */ + } + + function apply_transition(d3Obj, duration) { + if (!document.hidden) { + return d3Obj.transition().duration(duration); + } + return d3Obj; + } + + function contextMenu(d) { + d3.event.preventDefault(); + var menuSpec = [ + { + label: "Show details", + icon: "external-link", + cb: function(evt) { + var pos = {x: evt.pageX - 25, y: evt.pageY - 50} + var html = formatJSON(JSON.stringify(d, tooltipStringifyFilter, 2), true); + helper.openDialog(splitText(d.id).join(" "), html, pos); + } + } + ]; + var pos = {x: d3.event.pageX - 15, y: d3.event.pageY - 20}; + helper.contextMenu(menuSpec, pos); + } + + return { + on_data: function(data) { + if(data.length === 0) { + return; + } + + if(firstData) { + firstData = false; + + // Add center nodes + var NewCenterNodesDom = svg.selectAll('.center-nodes') + .data(entries(graph.center), function(d) { + return d.id; + }) + .enter() + .append('g') + .attr("class", "center-nodes") + .on('mouseover', showTooltip) + .on('mousemove', moveTooltip) + .on('mouseout', hideTooltip) + .on("contextmenu", contextMenu); + + NewCenterNodesDom.append('circle') + .attr('r', nradius) + .attr('cx', function(d) { return d.position.x; }) + .attr('cy', function(d) { return d.position.y; }) + .attr('id', function(d) { return d.id; }) + .style('fill', function(d) { + return colorStatus[d.status]; + }); + + NewCenterNodesDom.append('text') + .text(function(d) { + return d.id; + }) + .attr('text-anchor', 'middle') + .style('font-size', '28px'); + } + + graph = extractLinksNodes(data, graph); + + // Note: entries adds the id to the values in the original object + var nodes = entries(graph.nodes); + var links = entries(graph.links); + var status = entries(graph.status); + var center = entries(graph.center); + + var numberNodes = 
nodes.length; + vArcRadius = Math.max((numberNodes*nradius*1.8)/Math.PI, 1000); + // Calculate number of jobs to be able to fit all triangles into a link. + var maxJobsCount = 0; + links.forEach(function(link) { + var currentCount = Object.keys(link.jobs).length; + if(currentCount > maxJobsCount) { + maxJobsCount = currentCount; + } + }); + // 40 is the height of a status job arrow (+2 for borders). + vArcRadius = Math.max(maxJobsCount * 42 + 5 * nradius, vArcRadius); + + vBox = { + x: -1 * (vArcRadius + nradius), + y: -1 * (vArcRadius + nradius), + w: vArcRadius * 2 + nradius * 2, + h: vArcRadius + 3 * nradius + } + svg.attr('viewBox', vBox.x + ' ' + vBox.y + ' ' + vBox.w + ' ' + vBox.h); + + console.log("center", center); + + svg.selectAll('.center-nodes') + .data(center, function(d) { + return d.id; + }) + .select('circle') + .style('fill', function(d) { + return colorStatus[d.status]; + }); + + var newNodes = svg.selectAll('.node') + .data(nodes, function(d) { + return d.id; + }) + .enter() + .append('g') + .attr("class", "node") + .on("contextmenu", contextMenu); + + newNodes.append('circle') + .attr('r', nradius) + .attr('id', function(d) { + return d.id; + }) + .on('mouseover', showTooltip) + .on('mousemove', moveTooltip) + .on('mouseout', hideTooltip) + .on('click', openErrorView); + + newNodes.append('text') + .style('font-size', '28px') + .attr('text-anchor', 'middle') + .on('mouseover', showTooltip) + .on('mousemove', moveTooltip) + .on('mouseout', hideTooltip) + .each(function(d) { + var textNode = d3.select(this); + var words = splitText(d.id); + var dyStart = Math.ceil(words.length/2) - 1; + for(var i = 0; i < words.length; ++i) { + textNode + .append('tspan') + .attr('dy', i === 0? 
-dyStart + 'em':'1em') + .text(words[i]); + } + + }); + + var allPoints = svg.selectAll('g') + .filter(function(d) { + return !d.position; + }) + .select('circle'); + + var nData = []; + svg.selectAll('.node').each(function (d) { + nData.push(d); + }); + + nData.sort(function(a, b) { + return a.id.localeCompare(b.id); + }); + + var angle = Math.PI / (nData.length + 1); + + var positions = {}; + for(var i = 0; i < nData.length; ++i) { + var r = vArcRadius - 2 * nradius * (i % 2); + var x = -r * Math.cos((i + 1) * angle); + var y = -r * Math.sin((i + 1) * angle); + positions[nData[i].id] = {x: x, y: y}; + } + // Append center node positions. + for(var k in graph.center) { + positions[k] = graph.center[k].position; + } + + apply_transition(allPoints, animDuration) + .attr('cx', function(d) { + return positions[d.id].x; + }) + .attr('cy', function(d) { + return positions[d.id].y; + }) + .style('fill', function(d) { + return colorStatus[d.status]; + }) + + apply_transition(svg.selectAll('text'), animDuration) + .attr('x', function(d) { + return positions[d.id].x; + }) + .attr('y', function(d) { + return positions[d.id].y; + }); + + apply_transition(svg.selectAll('tspan'), animDuration) + .attr('x', function(d) { + return positions[d.id].x; + }); + + svg.selectAll('line') + .data(links, function(d) { + return d.id; + }) + .enter() + .insert('line', '.center-nodes') + .attr('stroke-width', 6) + .attr('id', function(d) { + return d.id; + }) + .on('mouseover', showTooltip) + .on('mousemove', moveTooltip) + .on('mouseout', hideTooltip) + .on('contextmenu', contextMenu); + + + // Adding connecting links + var allLinks = svg.selectAll('line'); + + apply_transition(allLinks, animDuration) + .attr('x1', function(d) { + if(!positions[d.source]) { return 0; } + return positions[d.source].x; + }) + .attr('y1', function(d) { + if(!positions[d.source]) { return 0; } + return positions[d.source].y; + }) + .attr('x2', function(d) { + if(!positions[d.target]) { return 0; } + return 
positions[d.target].x; + }) + .attr('y2', function(d) { + if(!positions[d.target]) { return 0; } + return positions[d.target].y; + }) + .attr('stroke', function(d) { + var jobsId = Object.keys(d.jobs); + for(var i = 0; i < status.length; ++i) { + if(d.jobs.hasOwnProperty(status[i].job)) { + if(status[i].status == "error") { + return 'red'; + } + } + } + return (d.running === true) ? 'green' : 'lightgrey'; + }); + + var linksMid = {}; + allLinks.each(function(d) { + if(!positions[d.source] || !positions[d.target]) { + return; + } + var dirX = positions[d.source].x - positions[d.target].x; + var dirY = positions[d.source].y - positions[d.target].y; + var dirM = Math.sqrt(dirX*dirX + dirY*dirY); + if(dirM > 0.01) { + dirX /= dirM; + dirY /= dirM; + } else { + dirX = 0; + dirY = -1; + } + + // So we are in the center + var jobsId = Object.keys(d.jobs); + + var pullJobs = []; + var pushJobs = []; + for(var i = 0; i < jobsId.length; ++i) { + if(d.jobs[jobsId[i]].direction === "pull") { + pullJobs.push(d.jobs[jobsId[i]]); + } else { + pushJobs.push(d.jobs[jobsId[i]]); + } + } + + // TODO: How to merge this two loops in one function... 
+ var dir = {x: dirX, y: dirY}; + for(var i = 0; i < pushJobs.length; ++i) { + var midX = 0.5 * positions[d.source].x + 0.5 * positions[d.target].x; + var midY = 0.5 * positions[d.source].y + 0.5 * positions[d.target].y; + linksMid[pushJobs[i].id] = {mid: {x: midX, y: midY}, direction: dir, link: d.id, pull: false}; + } + + dir = {x: -dirX, y: -dirY}; + for(var i = 0; i < pullJobs.length; ++i) { + var midX = 0.5 * positions[d.source].x + 0.5 * positions[d.target].x; + var midY = 0.5 * positions[d.source].y + 0.5 * positions[d.target].y; + linksMid[pullJobs[i].id] = {mid: {x: midX, y: midY}, direction: dir, link: d.id, pull: true}; + } + }); + console.log("The link mids", linksMid); + + var groupStatus = {}; + status.forEach(function(s) { + if(linksMid[s.job]) { + var linkId = linksMid[s.job].link; + if(!groupStatus.hasOwnProperty(linkId)) { + // Array to sort properly pullers and pushers along the links + groupStatus[linkId] = []; + } + if(linksMid[s.job].pull) { + groupStatus[linkId].push(s.id); + } else { + groupStatus[linkId].unshift(s.id); + } + } + }); + + var statusPos = {}; + for(var linkId in groupStatus) { + for(var i = 0; i < groupStatus[linkId].length; ++i) { + statusPos[groupStatus[linkId][i]] = i; + } + } + + var polySelection = svg.selectAll('polygon') + .data(status, function(d) { + return d.id; + }); + + polySelection.exit().remove(); + + polySelection.enter() + .append('polygon') + .attr('id', function(d) { + return d.id; + }) + .attr('points', '0,0 -15,40 15,40') + .on('mouseover', showTooltip) + .on('mousemove', moveTooltip) + .on('mouseout', hideTooltip) + .on('click', openChannelView) + .on('contextmenu', contextMenu); + + apply_transition(svg.selectAll('polygon'), animDuration) + .attr('transform', function(d) { + if(!linksMid[d.job]) { + console.log("Moving outside the visible area as we don't have a position yet", JSON.stringify(d.job)); + return "translate(0, 250) rotate(180)" + } else { + var linkId = linksMid[d.job].link; + // Trying to 
center the triangles along the link by shifting by half. + var shifted = 0.5 * (groupStatus[linkId].length - 1); + var dir = 1; + + if(!linksMid[d.job].pull) { + // Depending in the direction we would like to move up or down. + var dir = -1; + // Due to rotation we need to move the triangle up a little. + shifted -= 1; + } + + var dx = linksMid[d.job].direction.x; + var dy = linksMid[d.job].direction.y; + var angle = -1 * Math.atan2(dx, dy) * 180 / Math.PI; + + var x = linksMid[d.job].mid.x + (statusPos[d.id] - shifted) * dx * 40 * dir; + var y = linksMid[d.job].mid.y + (statusPos[d.id] - shifted) * dy * 40 * dir; + return "translate(" + x + ", " + y + ") rotate(" + angle + ")"; + } + }) + .style('stroke', 'black') + .style('stroke-width', 3) + .style('fill', function(d) { + return colorStatus[d.status]; + }); + }, + on_resize: resize, + on_reset: function() { + svg.selectAll('svg > *').remove(); + graph = initGraph(); + firstData = true; + }, + on_close: function() { + tooltipDiv.remove(); + } + }; +} diff --git a/rebar.config b/rebar.config index 6ce0af19..6893acfc 100644 --- a/rebar.config +++ b/rebar.config @@ -42,7 +42,7 @@ {esaml, {git, "https://github.com/K2InformaticsGmbH/esaml", {tag, "2.3.0"}}}, {imem, {git, "https://bitbucket.org/konnexions/imem", {tag, "3.9.0"}}}, {oranif, {git, "https://github.com/c-bik/oranif", {branch, "master"}}}, - {prometheus, "v4.5.0"} + {prometheus, "4.5.0"} ]}. {deps_error_on_conflict, false}. 
diff --git a/src/dderl_sup.erl b/src/dderl_sup.erl index d9dcd2a5..0d03249a 100644 --- a/src/dderl_sup.erl +++ b/src/dderl_sup.erl @@ -34,10 +34,18 @@ start_link() -> %% =================================================================== init([]) -> - {ok, {#{strategy => one_for_one, intensity => 5, period => 10}, - [?CHILD(dderl_dal, worker, []), - ?CHILD(dderl_rest, worker, []), - ?CHILD(dderl_metrics, worker, []), - ?CHILD(dderl_session_sup, supervisor, []), - ?CHILD(dderl_data_sender_sup, supervisor, []), - ?CHILD(dderl_data_receiver_sup, supervisor, [])]}}. + { + ok, + { + #{strategy => one_for_one, intensity => 5, period => 10}, + [ + ?CHILD(dderl_dal, worker, []), + ?CHILD(dderl_rest, worker, []), + ?CHILD(dderl_metrics, worker, []), + ?CHILD(dderl_session_sup, supervisor, []), + ?CHILD(dderl_data_sender_sup, supervisor, []), + ?CHILD(dderl_data_receiver_sup, supervisor, []), + ?CHILD(dperl_sup, supervisor, []) + ] + } + }. diff --git a/src/dperl.hrl b/src/dperl.hrl new file mode 100644 index 00000000..03f08875 --- /dev/null +++ b/src/dperl.hrl @@ -0,0 +1,316 @@ +-ifndef(_dperl_HRL_). +-define(_dperl_HRL_, true). + +-define(LOG_TAG, "_dperl_"). +-include_lib("dderl/src/dderl.hrl"). + +-type plan() :: at_most_once|at_least_once|on_all_nodes. + +-define(TABLESPEC(__T,__O), + {__T, record_info(fields, __T), ?__T, #__T{}, __O}). +-define(TABLESPEC(__TA,__T,__O), + {__TA, record_info(fields, __T), ?__T, #__T{}, __O}). + +-define(NOT_FOUND, '$notfound'). + +-record(dperlJob, { + name :: binary(), + module :: atom(), + args :: any(), + srcArgs :: any(), + dstArgs :: any(), + enabled :: true|false, + running :: true|false, + plan :: plan(), + nodes :: [atom()], + opts = [] :: list() + }). +-define(dperlJob, [binstr,atom,term,term,term,boolean,atom,atom,list,list]). + +-define(JOBDYN_TABLE, 'dperlNodeJobDyn@'). +-record(dperlNodeJobDyn, { + name :: binary(), % same as dperlJob.name + state :: map(), + status :: atom(), + statusTime :: ddTimestamp() + }). 
+-define(dperlNodeJobDyn, [binstr,map,atom,timestamp]). + +-record(dperlService, { + name :: binary(), + module :: atom(), + args :: any(), + resource :: any(), + interface :: any(), + enabled :: true|false, + running :: true|false, + plan :: plan(), + nodes :: [atom()], + opts = [] :: list() + }). +-define(dperlService, [binstr,atom,term,term,term,boolean,atom,atom,list,list]). + +-define(SERVICEDYN_TABLE, 'dperlServiceDyn@'). +-record(dperlServiceDyn, { + name :: binary(), % same as dperlService.name + state :: map(), + status :: atom(), + statusTime :: ddTimestamp() + }). +-define(dperlServiceDyn, [binstr,map,atom,timestamp]). + +-define(G(__JS,__F), + if is_record(__JS, dperlJob) -> (__JS)#dperlJob.__F; + is_record(__JS, dperlService) -> (__JS)#dperlService.__F; + true -> error({badarg, __JS}) + end). +-define(S(__JS,__F,__V), + if is_record(__JS, dperlJob) -> (__JS)#dperlJob{__F = __V}; + is_record(__JS, dperlService) -> (__JS)#dperlService{__F = __V}; + true -> error({badarg, __JS}) + end). +-define(RC(__JS), + if is_record(__JS, dperlJob) -> job; + is_record(__JS, dperlService) -> service; + true -> error({badarg, __JS}) + end). +-define(SUP(__JS), + if is_record(__JS, dperlJob) -> dperl_job_sup; + is_record(__JS, dperlService) -> dperl_service_sup; + true -> error({badarg, __JS}) + end). + +-define(EPOCH, {0,0}). + +-define(TOAC_KEY_INDEX_ID, 1). + +-define(GET_RPC_TIMEOUT, + ?GET_CONFIG(rpcTimeout, [], 2000, + "Max timeout in millisecond for a rpc call") + ). + +-define(GET_WORKER_CHECK_INTERVAL(__Type), + case __Type of + job -> + ?GET_CONFIG(jobCheckInterval, [], 1000, + "Interval in millisecond between job configuration" + " checks"); + service -> + ?GET_CONFIG(serviceCheckInterval, [], 5000, + "Interval in millisecond between service configuration" + " checks") + end + ). + +-define(GET_CLUSTER_CHECK_INTERVAL(__Type), + ?GET_CONFIG(clusterCheckInterval, [__Type], 200, + "Interval in milliseconds between cluster checks") + ). 
+ +-define(GET_LINK_RETRY_INTERVAL(__MODULE, __JOB_NAME), + ?GET_CONFIG(linkRetryInterval, [__MODULE, __JOB_NAME], 2000, + "Retry interval in millisecond of a broken link") + ). + +-define(GET_FOCUS(__KEY), + ?GET_CONFIG({focus, __KEY}, [], #{}, + "Channel to key transformation function mapping") + ). + +-define(GET_IGNORED_JOBS_LIST, + ?GET_CONFIG(ignoredJobs, [], [], + "List of job names that will be ignored by metrics" + " and not presented in dashboards") + ). + +-define(CONNECT_CHECK_IMEM_LINK(__State), + (fun(#state{active_link = _ActiveLink, links = _Links, + cred = #{user := _User, password := _Password}, + imem_sess = _OldSession} = _State) -> + #{schema := _Schema} = lists:nth(_ActiveLink, _Links), + case catch _OldSession:run_cmd(schema, []) of + _Schema -> {ok, _State}; + _ -> + catch _OldSession:close(), + case dperl_dal:connect_imem_link( + _ActiveLink, _Links, _User, _Password) of + {ok, _Session, _Pid} -> + {ok, _State#state{imem_sess = _Session, + imem_connected = true}}; + {_Error, _NewActiveLink} -> + {error, _Error, + _State#state{active_link = _NewActiveLink, + imem_connected = false}} + end + end + end)(__State)). + +-define(CYCLE_ALWAYS_WAIT(__MODULE, __JOB_NAME), + ?GET_CONFIG(cycleAlwaysWait,[__MODULE, __JOB_NAME], 1000, + "Delay in millisecond before restarting the cycle," + " if current cycle is not idle and cycle is not failed") + ). + +-define(CYCLE_IDLE_WAIT(__MODULE, __JOB_NAME), + ?GET_CONFIG(cycleIdleWait, [__MODULE, __JOB_NAME], 3000, + "Delay in millisecond before restarting the cycle," + " if current cycle was idle") + ). + +-define(CYCLE_ERROR_WAIT(__MODULE, __JOB_NAME), + ?GET_CONFIG(cycleErrorWait, [__MODULE, __JOB_NAME], 5000, + "Delay in millisecond before restarting the cycle," + " if current cycle has failed") + ). 
+
+-define(CLEANUP_INTERVAL(__MODULE, __JOB_NAME),
+        ?GET_CONFIG(cleanupInterval, [__MODULE, __JOB_NAME], 100000,
+                    "Delay in millisecond between any cleanup success"
+                    " end time and the next cleanup attempt start time")
+       ).
+
+-define(REFRESH_INTERVAL(__MODULE, __JOB_NAME),
+        ?GET_CONFIG(refreshInterval, [__MODULE, __JOB_NAME], 10000000,
+                    "Delay in millisecond between any refresh success"
+                    " end time and the next cleanup attempt start time")
+       ).
+
+-define(CLEANUP_BATCH_INTERVAL(__MODULE, __JOB_NAME),
+        ?GET_CONFIG(cleanupBatchInterval, [__MODULE, __JOB_NAME], 2000,
+                    "Delay in millisecond between cleanups within a"
+                    " complete cleanup cycle")
+       ).
+
+-define(REFRESH_BATCH_INTERVAL(__MODULE, __JOB_NAME),
+        ?GET_CONFIG(refreshBatchInterval, [__MODULE, __JOB_NAME], 10000,
+                    "Delay in millisecond between refreshes within a"
+                    " refresh cleanup cycle")
+       ).
+
+-define(REFRESH_HOURS(__MODULE, __JOB_NAME),
+        ?GET_CONFIG(refreshHours, [__MODULE, __JOB_NAME], [],
+                    "List of hour(s) of the day when refresh should happen."
+                    " [] for every time possible")
+       ).
+
+-define(MAX_BULK_COUNT(__MODULE, __JOB_NAME),
+        ?GET_CONFIG(maxBulkCount, [__MODULE, __JOB_NAME], 100,
+                    "Max count for each bulk")
+       ).
+
+-define(MAX_CLEANUP_BULK_COUNT(__MODULE, __JOB_NAME),
+        ?GET_CONFIG(maxCleanupBulkCount, [__MODULE, __JOB_NAME], 100,
+                    "Max count for each cleanup bulk")
+       ).
+
+-define(MAX_REFRESH_BULK_COUNT(__MODULE, __JOB_NAME),
+        ?GET_CONFIG(maxRefreshBulkCount, [__MODULE, __JOB_NAME], 100,
+                    "Max count for each refresh bulk")
+       ).
+
+-define(KPI_CLEANUP_TIME(__JOB_NAME),
+        ?GET_CONFIG(kpiCleanupTime, [__JOB_NAME], 30,
+                    "Number of minutes after which a kpi record can be "
+                    "cleaned out") * 60
+       ).
+
+-define(JOB_DOWN_MOD_EXCLUSIONS,
+        ?GET_CONFIG(jobDownModExclusions, [], [],
+                    "List of job modules to be excluded from job down count")
+       ).
+
+-define(JOB_DOWN_NAME_EXCLUSIONS,
+        ?GET_CONFIG(jobDownNameExclusions, [], [],
+                    "List of job names to be excluded from job down count")
+       ).
+
+-define(JOB_ERROR_MOD_EXCLUSIONS,
+        ?GET_CONFIG(jobErrorModExclusions, [], [],
+                    "List of job modules to be excluded from job error count")
+       ).
+
+-define(JOB_ERROR_NAME_EXCLUSIONS,
+        ?GET_CONFIG(jobErrorNameExclusions, [], [],
+                    "List of job names to be excluded from job error count")
+       ).
+
+-define(JOB_DYN_STATUS_TIMEOUT,
+        ?GET_CONFIG(jobDynStatusTimeout, [], 60000,
+                    "Time in milliseconds after which status is considered"
+                    " stale") * 1000
+       ).
+
+-define(JOB_DESCRIPTIONS,
+        ?GET_CONFIG(jobDescriptions, [], #{},
+                    "Map for specifying the description of platforms for the"
+                    " dperl dashboard")
+       ).
+
+-define(JOB_ERROR, <<"dperlJobError">>).
+
+-define(SESSION_CLOSE_ERROR_CODES(__MODULE),
+        ?GET_CONFIG(oraSessionCloseErrorCodes, [__MODULE, get(name)], [28,3113,3114,6508],
+                    "List of oracle error codes on which session is recreated")
+       ).
+
+-define(STMT_CLOSE_ERROR_CODES(__MODULE),
+        ?GET_CONFIG(oraStmtmCloseErrorCodes, [__MODULE, get(name)], [4069,4068],
+                    "List of oracle error codes on which stmts are recreated")
+       ).
+
+-ifndef(TEST).
+
+-define(STATUS_INTERVAL,
+        ?GET_CONFIG(activityStatusInterval, [get(name)], 60000,
+                    "Delay in millisecond before writing activity status in "
+                    "status dir")
+       ).
+
+-define(STATUS_FILE,
+        ?GET_CONFIG(activityStatusFile, [get(name)], "ServiceActivityLog.sal",
+                    "File name of activity status log")
+       ).
+
+-else.
+
+-define(STATUS_INTERVAL, 1000).
+
+-define(STATUS_FILE, "ServiceActivityLog.sal").
+
+-endif.
+
+-define(ST, erlang:get_stacktrace()).
+
+% Job logger interfaces
+-define(JInfo(__F,__A),  ?Info ([{enum,get(jname)}],"[~p] "__F,[get(jstate)|__A])).
+-define(JWarn(__F,__A),  ?Warn ([{enum,get(jname)}],"[~p] "__F,[get(jstate)|__A])).
+-define(JDebug(__F,__A), ?Debug([{enum,get(jname)}],"[~p] "__F,[get(jstate)|__A])).
+-define(JError(__F,__A), ?Error([{enum,get(jname)}],"[~p] "__F,[get(jstate)|__A], [])). +-define(JError(__F,__A,__S), ?Error([{enum,get(jname)}],"[~p] "__F,[get(jstate)|__A],__S)). +-define(JTrace(__F,__A), + case get(debug) of + true -> ?JInfo(__F,__A); + _ -> no_op + end). + +-define(JInfo(__F), ?JInfo (__F,[])). +-define(JWarn(__F), ?JWarn (__F,[])). +-define(JError(__F), ?JError(__F,[])). +-define(JDebug(__F), ?JDebug(__F,[])). +-define(JTrace(__F), ?JTrace(__F,[])). + +-define(NODE, atom_to_list(node())). + +% Service logger interfaces +-define(SInfo(__F,__A), ?Info ([{enum,get(jname)}],__F,__A)). +-define(SWarn(__F,__A), ?Warn ([{enum,get(jname)}],__F,__A)). +-define(SDebug(__F,__A), ?Debug([{enum,get(jname)}],__F,__A)). +-define(SError(__F,__A), ?Error([{enum,get(jname)}],__F,__A, [])). +-define(SError(__F,__A,__S), ?Error([{enum,get(jname)}],__F,__A,__S)). + +-define(SInfo(__F), ?SInfo (__F,[])). +-define(SWarn(__F), ?SWarn (__F,[])). +-define(SError(__F), ?SError(__F,[])). +-define(SDebug(__F), ?SDebug(__F,[])). + +-endif. %_dperl_HRL_ diff --git a/src/dperl_cp.erl b/src/dperl_cp.erl new file mode 100644 index 00000000..9f8b4436 --- /dev/null +++ b/src/dperl_cp.erl @@ -0,0 +1,287 @@ +-module(dperl_cp). +-behaviour(gen_server). + +-include("dperl.hrl"). + +% gen_server exports +-export([start_link/1, init/1, terminate/2, handle_call/3, handle_cast/2, + handle_info/2, code_change/3]). + +-record(state, {type, last_seen_nodes = []}). + +start_link(Type) when Type == service; Type == job -> + Name = list_to_atom(lists:concat(["dperl_", Type, "_cp"])), + case gen_server:start_link({local, Name}, ?MODULE, [Type], []) of + {ok, Pid} -> + ?Info("~p started!", [Name]), + {ok, Pid}; + {error, Error} -> + ?Error("starting ~p: ~p", [Name, Error]), + {error, Error}; + Other -> + ?Error("starting ~p: ~p", [Name, Other]), + Other + end. 
+ +init([Type]) -> + process_flag(trap_exit, true), + case Type of + job -> + ok = dperl_dal:subscribe({table, dperlJob, detailed}); + service -> + ok = dperl_dal:subscribe({table, dperlService, detailed}) + end, + imem_dal_skvh:create_check_channel(?JOB_ERROR, [audit]), + erlang:send_after(?GET_WORKER_CHECK_INTERVAL(Type), self(), check_workers), + {ok, #state{type = Type}}. + +handle_call(Request, _From, State) -> + ?Error("unsupported call ~p", [Request]), + {reply,badarg,State}. + +handle_cast(Request, State) -> + ?Error("unsupported cast ~p", [Request]), + {noreply,State}. + +-define(MTE(__Rest), {mnesia_table_event,__Rest}). +%% schema deletion event +handle_info(?MTE({delete,schema,{schema,RecType},_,_}), State) + when RecType == dperlJob; RecType == dperlService -> + ?Info("stop all jobs/services"), + [catch dperl_worker:stop(Mod, Name) || {Mod, Name} <- dperl_worker:list(State#state.type)], + {noreply, State}; +%% handles deleteion of a record and if the job was supposed to be running on this node then +%% job is stopped if not ignored +handle_info(?MTE({delete,JSM,{JSM,_},[JobOrService],_}), State) + when (is_record(JobOrService, dperlJob) andalso JSM == dperlJob) orelse + (is_record(JobOrService, dperlService) andalso JSM == dperlService) -> + case check_plan(JobOrService) of + true -> + ?Debug("stop ~p_~s", [?G(JobOrService,module), ?G(JobOrService,name)]), + stop(JobOrService); + false -> + no_op + end, + {noreply, State}; +%% handles record update or new record insert. 
Checks if the record changes +%% require a restart, cold start or stopping the job/service +handle_info(?MTE({write,JSM,JobOrService, OldJobOrServices, _}), State) + when (is_record(JobOrService, dperlJob) andalso JSM == dperlJob) orelse + (is_record(JobOrService, dperlService) andalso JSM == dperlService) -> + NewEnabled = ?G(JobOrService, enabled), + {OldEnabled, IsEqual} = + case OldJobOrServices of + [] -> {false, false}; %% new record insert + [OldJobOrService | _] -> + %% comparing old and new job or service configuration excluding running column value + IsSame = JobOrService == ?S(OldJobOrService, running, ?G(JobOrService, running)), + {?G(OldJobOrService, enabled), IsSame} + end, + case {NewEnabled, OldEnabled, IsEqual} of + {true, true, false} -> stop(JobOrService); + {A, A, _} -> no_op; + {false, true, _} -> stop(JobOrService); + {true, false, _} -> + case check_plan(JobOrService) of + true -> + stop(JobOrService), + start(JobOrService); + false -> no_op + end + end, + {noreply, State}; +handle_info(check_workers, #state{type = Type, last_seen_nodes = LNodes} = State) -> + case dperl_dal:data_nodes() of + LNodes -> + check_workers(Type, LNodes), + erlang:send_after(?GET_WORKER_CHECK_INTERVAL(Type), self(), check_workers), + {noreply, State}; + SeenNodes -> + log_nodes_status(SeenNodes -- LNodes, Type, "Node added"), + log_nodes_status(LNodes -- SeenNodes, Type, "Node disappeared"), + erlang:send_after(?GET_CLUSTER_CHECK_INTERVAL(Type), self(), check_workers), + {noreply, State#state{last_seen_nodes = SeenNodes}} + end; +handle_info(Info, State) -> + ?Error("unsupported info ~p", [Info]), + {noreply,State}. + +terminate(Reason, _State) when Reason == normal; Reason == shutdown -> + ?Info("shutdown"), + {ok, _} = dperl_dal:unsubscribe({table, dperlJob, detailed}), + {ok, _} = dperl_dal:unsubscribe({table, dperlService, detailed}); +terminate(Reason, State) -> ?Error("crash ~p : ~p", [Reason, State]). + +code_change(_OldVsn, State, _Extra) -> {ok, State}. 
+ +check_workers(Type, DNodes) when is_atom(Type), is_list(DNodes) -> + JobsORServices = dperl_dal:get_enabled(Type), + RunningJobsOrServices = dperl_worker:list(Type), + EnabledJobsOrServices = + lists:map( + fun(#dperlJob{module = Mod, name = Name}) -> {Mod, Name}; + (#dperlService{module = Mod, name = Name}) -> {Mod, Name} + end, JobsORServices), + JobsOrServicesToBeStopped = RunningJobsOrServices -- EnabledJobsOrServices, + [catch dperl_worker:stop(Mod, Name) || {Mod, Name} <- JobsOrServicesToBeStopped], + check_workers(JobsORServices, DNodes); +check_workers([], _DNodes) -> ok; +check_workers([JobOrService | JobsOrServices], DNodes) -> + IsAlive = + case whereis(dperl_worker:child_id(?G(JobOrService,module), + ?G(JobOrService,name))) of + undefined -> false; + Pid when is_pid(Pid) -> is_process_alive(Pid) + end, + try imem_config:reference_resolve(JobOrService) of + JobOrServiceRefResolved -> + case {check_plan(JobOrServiceRefResolved, DNodes), IsAlive} of + {false, true} -> stop(JobOrService); + {true, false} -> start(JobOrService); + {_, _} -> + case check_config(JobOrService) of + ok -> ok; + _ -> stop(JobOrService) + end + end + catch + _:Error -> + ?Error("invalid new config ~p. job/service skipped", [Error], ?ST), + dperl_dal:disable(JobOrService), + ok + end, + check_workers(JobsOrServices, DNodes). + +-spec check_plan(#dperlJob{} | #dperlService{}) -> true | false. +check_plan(JobOrService) -> check_plan(JobOrService, dperl_dal:data_nodes()). + +-spec check_plan(#dperlJob{} | #dperlService{}, list()) -> true | false. +check_plan(#dperlJob{plan = Plan, nodes = Nodes} = Job, DataNodes) -> + check_plan(Job, Plan, Nodes, DataNodes); +check_plan(#dperlService{plan = Plan, nodes = Nodes} = Service, DataNodes) -> + check_plan(Service, Plan, Nodes, DataNodes). 
+ +check_plan(JobOrService, Plan, Nodes, DataNodes) -> + case catch check_plan(Plan, Nodes, DataNodes) of + Result when is_boolean(Result) -> Result; + Error -> + ?Error("checking plan Plan : ~p Error : ~p", [Plan, Error]), + ?Error("Disabling the job/service due to errors"), + dperl_dal:disable(JobOrService), + false + end. + +check_config(JobOrService) -> + Module = ?G(JobOrService,module), + Name = ?G(JobOrService,name), + case ?G(JobOrService, running) of + true -> no_op; + _ -> + %set running to true + dperl_dal:set_running(JobOrService, true) + end, + case dperl_worker:child_spec(?RC(JobOrService), {Module, Name}) of + {ok, #{id := {Module,Name}, modules := [Module], + start := {dperl_worker, start_link, + [ActiveJobOrService | _]}}} -> + ActiveRunning = ?G(ActiveJobOrService,running), + case catch imem_config:reference_resolve( + ?S(JobOrService,running,ActiveRunning)) of + {'ClientError', Error} -> + ?Error("invalid new config ~p. job/service not restarted", + [Error]), + ok; + ActiveJobOrService -> ok; + _ -> stop + end; + Error -> Error + end. + +-spec check_plan(atom(), list(), list()) -> true|false. 
+check_plan(_, [], _DataNodes) -> true;
+%% Running on all nodes
+check_plan(on_all_nodes, Nodes, _DataNodes) ->
+    lists:member(node(), Nodes);
+%% Running on at least one node in the cluster
+check_plan(at_least_once, Nodes, DataNodes) ->
+    case nodes_in_db(Nodes, DataNodes) of
+        [] -> true;
+        DBNodes -> hd(DBNodes) == node()
+    end;
+%% Makes sure only one instance runs
+check_plan(at_most_once, Nodes, DataNodes) ->
+    NodesInCluster = nodes_in_db(Nodes, DataNodes),
+    NodesCountInCluster = length(NodesInCluster),
+    if
+        NodesCountInCluster > length(Nodes)/2 ->
+            hd(NodesInCluster) == node();
+        true ->
+            case get_max_db_island() of
+                '$none' ->
+                    length(Nodes) == 2 andalso hd(NodesInCluster) == node();
+                MaxIslandSize ->
+                    case MaxIslandSize > NodesCountInCluster of
+                        true -> false;
+                        false -> hd(NodesInCluster) == node()
+                    end
+            end
+    end;
+check_plan(Plan, _, _DataNodes) ->
+    ?Error("Invalid plan ~p", [Plan]),
+    error(badarg).
+
+get_max_db_island() -> get_max_db_island(imem_meta:nodes(), []).
+get_max_db_island([], []) -> '$none';
+get_max_db_island([], Acc) -> lists:max(Acc);
+get_max_db_island([Node|Nodes], Acc) ->
+    case rpc:call(Node, imem_meta, data_nodes, [], ?GET_RPC_TIMEOUT) of
+        {badrpc, _} ->
+            get_max_db_island(Nodes, Acc);
+        DBNodes ->
+            get_max_db_island(Nodes, [length(DBNodes) | Acc])
+    end.
+
+-spec nodes_in_db(list(), list()) -> list().
+nodes_in_db(Nodes, DataNodes) -> nodes_in_db(Nodes, DataNodes, []).
+nodes_in_db([], _, NodesInCluster) -> lists:reverse(NodesInCluster);
+nodes_in_db([Node | Nodes], DataNodes, NodesInCluster) ->
+    case lists:member(Node, DataNodes) of
+        false ->
+            nodes_in_db(Nodes, DataNodes, NodesInCluster);
+        _ ->
+            nodes_in_db(Nodes, DataNodes, [Node|NodesInCluster])
+    end.
+
+start(JobOrService) ->
+    start(JobOrService, ?G(JobOrService,module), ?G(JobOrService,name)).
+start(JobOrService, Mod, Name) -> + ?Debug("start ~p_~s", [Mod, Name]), + try dperl_worker:start(imem_config:reference_resolve(JobOrService)) + catch + _:Error -> + ?Error([{enum, dperl_dal:to_atom(Name)}], + "~p disabled, on start ~p", [Mod, Error], ?ST), + dperl_dal:disable(JobOrService) + end. + +stop(JobOrService) -> + stop(JobOrService, ?G(JobOrService,module), ?G(JobOrService,name)). +stop(JobOrService, Mod, Name) -> + case dperl_worker:is_alive(?RC(JobOrService), Name) of + true -> + ?Debug("stop ~p_~s", [Mod, Name]), + try dperl_worker:stop(Mod, Name) + catch + _:Error -> + ?Error([{enum, dperl_dal:to_atom(Name)}], + "~p disabled, on stop ~p at ~p", [Mod, Error, ?ST]), + dperl_dal:disable(JobOrService) + end; + false -> no_op + end. + +-spec log_nodes_status(list(), atom(), string()) -> ok. +log_nodes_status([], _Type, _Msg) -> ok; +log_nodes_status([Node | Nodes], Type, Msg) -> + ?Warn([{enum, Node}], "~p : ~s", [Type, Msg]), + log_nodes_status(Nodes,Type, Msg). diff --git a/src/dperl_dal.erl b/src/dperl_dal.erl new file mode 100644 index 00000000..b87ff3ea --- /dev/null +++ b/src/dperl_dal.erl @@ -0,0 +1,727 @@ +-module(dperl_dal). + +-include("dperl.hrl"). 
+ +-export([select/2, subscribe/1, unsubscribe/1, write/2, check_table/5, + sql_jp_bind/1, sql_bind_jp_values/2, read_channel/2, + io_to_oci_datetime/1, create_check_channel/1, write_channel/3, + read_check_write/4, read_audit_keys/3, read_audit/3, get_enabled/1, + update_job_dyn/2, update_job_dyn/3, job_error_close/1, to_binary/1, + count_sibling_jobs/2, create_check_channel/2, write_protected/6, + get_last_state/1, get_last_audit_time/1, connect_imem_link/4, + remove_from_channel/2, remove_deep/2, data_nodes/0, job_state/1, + job_error/3, job_error/4, job_error_close/0, update_service_dyn/4, + update_service_dyn/3, all_keys/1, read_gt/3, time_str/1, ts_str/1, + safe_json_map/1, read_gt_lt/4, normalize_map/1, read_keys_gt/3, + write_if_different/3, maps_diff/2, read_keys_gt_lt/4, oci_opts/2, + oci_fetch_rows/2, key_from_json/1, disable/1, set_running/2, + read_siblings/2, read_channel_raw/2, worker_error/4, sort_links/1, + get_pool_name/1, remote_dal/3, get_pool_name/2, run_oci_stmt/3, + activity_logger/3, create_check_index/2, to_atom/1, report_status/7, + key_to_json/1, key_to_json_enc/1]). + +check_table(Table, ColumnNames, ColumnTypes, DefaultRecord, Opts) -> + case catch imem_meta:create_check_table( + Table, {ColumnNames, ColumnTypes, DefaultRecord}, + Opts, system) of + {'EXIT', {'ClientError', _} = Reason} -> + ?Error("create_check table ~p, ~p", [Table, Reason]); + Else -> + ?Info("create_check table ~p... ~p", [Table, Else]) + end. + +-spec create_check_channel(binary() | list()) -> ok | no_return(). +create_check_channel(Channel) when is_list(Channel) -> + create_check_channel(list_to_binary(Channel)); +create_check_channel(Channel) when is_binary(Channel) -> + imem_dal_skvh:create_check_channel(Channel). + +-spec create_check_channel(binary() | list(), list()) -> ok | no_return(). 
+create_check_channel(Channel, Opts) when is_list(Channel) -> + create_check_channel(list_to_binary(Channel), Opts); +create_check_channel(Channel, Opts) when is_binary(Channel) -> + imem_dal_skvh:create_check_channel(Channel, Opts). + +-spec create_check_index(binary(), list()) -> + ok | {'ClientError', term()} | {'SystemException', term()}. +create_check_index(Channel, IndexDefinition) -> + TableName = to_atom(imem_dal_skvh:table_name(Channel)), + case imem_meta:init_create_index(TableName, IndexDefinition) of + ok -> ok; + {'ClientError',{"Index already exists", _}} -> ok; + Other -> Other + end. + +-spec write_channel(binary(), any(), any()) -> ok | {error, any()}. +write_channel(Channel, Key, Val) when is_map(Val); byte_size(Val) > 0 -> + case catch imem_dal_skvh:write(system, Channel, Key, Val) of + Res when is_map(Res) -> ok; + {'EXIT', Error} -> {error, Error}; + {error, Error} -> {error, Error}; + Other -> {error, Other} + end. + +write_if_different(Channel, Key, Val) -> + case imem_dal_skvh:read(system, Channel, [Key]) of + [#{cvalue := Val}] -> no_op; + _ -> write_channel(Channel, Key, Val) + end. + +read_channel(Channel, Key) when is_list(Channel) -> + read_channel(list_to_binary(Channel), Key); +read_channel(Channel, Key) when is_binary(Channel) -> + case imem_dal_skvh:read(system, Channel, [Key]) of + [#{cvalue := Val}] when is_binary(Val) -> safe_json_map(Val); + [#{cvalue := Val}] -> Val; + _ -> ?NOT_FOUND + end. + +read_channel_raw(Channel, Key) when is_list(Channel) -> + read_channel_raw(list_to_binary(Channel), Key); +read_channel_raw(Channel, Key) when is_binary(Channel) -> + imem_dal_skvh:read(system, Channel, Key). + +read_siblings(Channel, Key) when is_list(Channel) -> + read_siblings(list_to_binary(Channel), Key); +read_siblings(Channel, Key) when is_binary(Channel) -> + imem_dal_skvh:read_siblings(system, Channel, Key). 
+ +read_check_write(Channel, Key, Val, Type) -> + case read_channel(Channel, Key) of + ?NOT_FOUND -> + if + (Type == map andalso is_map(Val)) orelse (Type == bin andalso is_binary(Val)) -> + write_channel(Channel, Key, Val); + Type == bin andalso is_map(Val) -> + write_channel(Channel, Key, imem_json:encode(Val)); + Type == map andalso is_binary(Val) -> + write_channel(Channel, Key, imem_json:decode(Val, [return_maps])) + end; + _ -> no_op + end. + +read_audit_keys(Channel, TimeStamp, Limit) when is_list(Channel) -> + read_audit_keys(list_to_binary(Channel), TimeStamp, Limit); +read_audit_keys(Channel, TimeStamp, Limit) when is_binary(Channel) -> + case imem_dal_skvh:audit_readGT(system, Channel, TimeStamp, Limit) of + Audits when length(Audits) > 0 -> + [#{time := StartTime} | _] = Audits, + #{time := EndTime} = lists:last(Audits), + {StartTime, EndTime, + [K || #{ckey := K} <- Audits, K /= undefined]}; + _ -> {TimeStamp, TimeStamp, []} + end. + +read_audit(Channel, TimeStamp, Limit) when is_list(Channel) -> + read_audit(list_to_binary(Channel), TimeStamp, Limit); +read_audit(Channel, TimeStamp, Limit) when is_binary(Channel) -> + case imem_dal_skvh:audit_readGT(system, Channel, TimeStamp, Limit) of + Audits when length(Audits) > 0 -> + [#{time := StartTime} | _] = Audits, + #{time := EndTime} = lists:last(Audits), + {StartTime, EndTime, + [#{key => K, + oval => if is_binary(Ov) -> safe_json_map(Ov); + true -> Ov end, + nval => if is_binary(Nv) -> safe_json_map(Nv); + true -> Nv end} + || #{ckey := K, ovalue := Ov, nvalue := Nv} <- Audits, K /= undefined] + }; + _ -> {TimeStamp, TimeStamp, []} + end. + +remove_from_channel(Channel, Key) when is_list(Channel) -> + remove_from_channel(list_to_binary(Channel), Key); +remove_from_channel(Channel, Key) -> + case imem_dal_skvh:read(system, Channel, [Key]) of + [] -> no_op; + Row -> imem_dal_skvh:remove(system, Channel, Row) + end. 
+ +remove_deep(Channel, BaseKey) -> + Rows = imem_dal_skvh:read_deep(system, Channel, [BaseKey]), + imem_dal_skvh:remove(system, Channel, Rows). + +read_gt(Channel, StartKey, BulkSize) when is_binary(Channel) -> + lists:map( + fun(#{ckey := Key, cvalue := Val}) -> {Key, safe_json_map(Val)} end, + imem_dal_skvh:readGT(system, Channel, StartKey, BulkSize)). + +read_gt_lt(Channel, StartKey, EndKey, BulkSize) -> + KeyVals = case imem_dal_skvh:readGELTMap(system, Channel, + StartKey, EndKey, BulkSize + 1) of + [#{ckey := StartKey} | Rest] -> Rest; + List -> lists:sublist(List, BulkSize) + end, + lists:map(fun(#{ckey := Key, cvalue := Val}) -> {Key, safe_json_map(Val)} end, + KeyVals). + +read_keys_gt(Channel, StartKey, BulkSize) -> + read_keys_gt_lt(Channel, StartKey, <<255>>, BulkSize). + +read_keys_gt_lt(Channel, StartKey, EndKey, BulkSize) -> + case imem_dal_skvh:readGELTKeys(system, Channel, StartKey, EndKey, BulkSize + 1) of + [StartKey | T] -> T; + Keys -> lists:sublist(Keys, BulkSize) + end. + +subscribe(Event) -> imem_meta:subscribe(Event). +unsubscribe(Event) -> imem_meta:unsubscribe(Event). +write(Table, Record) -> imem_meta:write(Table, Record). +select(Table, MatchSpec) -> imem_meta:select(Table, MatchSpec). +sql_jp_bind(Sql) -> imem_meta:sql_jp_bind(Sql). + +sql_bind_jp_values(BindParamsMeta, JpPathBinds) -> + imem_meta:sql_bind_jp_values(BindParamsMeta, JpPathBinds). + +io_to_oci_datetime(Dt) -> + oci_util:to_dts(imem_datatype:io_to_datetime(Dt)). + +get_enabled(job) -> + {Jobs, _} = imem_meta:select(dperlJob, + [{#dperlJob{enabled=true, _='_'},[],['$_']}]), + Jobs; +get_enabled(service) -> + {Services, _} = imem_meta:select(dperlService, + [{#dperlService{enabled=true, _='_'},[], + ['$_']}]), + Services. 
+ +disable(#dperlJob{name = Name}) -> + update_job_dyn(Name, error), + imem_meta:transaction(fun() -> + case imem_meta:read(dperlJob, Name) of + [Job] -> + ok = imem_meta:write(dperlJob, Job#dperlJob{enabled = false, running = false}); + _ -> no_op + end + end); +disable(#dperlService{name = Name}) -> + imem_meta:transaction(fun() -> + case imem_meta:read(dperlService, Name) of + [Service] -> + ok = imem_meta:write(dperlService, + Service#dperlService{enabled = false, running = false}); + _ -> no_op + end + end). + +set_running(#dperlJob{name = Name}, Running) -> + imem_meta:transaction(fun() -> + case imem_meta:read(dperlJob, Name) of + [#dperlJob{running = _} = Job] -> + ok = imem_meta:write(dperlJob, Job#dperlJob{running = Running}); + _ -> no_op + end + end); +set_running(#dperlService{name = Name}, Running) -> + imem_meta:transaction(fun() -> + case imem_meta:read(dperlService, Name) of + [#dperlService{running = _} = Service] -> + ok = imem_meta:write(dperlService, Service#dperlService{running = Running}); + _ -> no_op + end + end). + + +update_service_dyn(ServiceName, State, ActiveThreshold, OverloadThreshold) + when is_binary(ServiceName), is_map(State), is_integer(ActiveThreshold), + is_integer(OverloadThreshold) -> + Status = + case maps:get(req, State, 0) of + Req when Req < ActiveThreshold -> idle; + Req when Req >= ActiveThreshold andalso + Req < OverloadThreshold -> active; + Req when Req >= OverloadThreshold -> overload + end, + imem_meta:transaction(fun() -> + case imem_meta:read(?SERVICEDYN_TABLE, ServiceName) of + [] -> + update_service_dyn(ServiceName, State, Status); + [#dperlServiceDyn{state = OldState}] -> + NewState = maps:merge(OldState, State), + if OldState /= NewState -> + update_service_dyn(ServiceName, NewState, Status); + true -> ok + end + end + end). 
+ +update_service_dyn(ServiceName, State, Status) when is_binary(ServiceName) andalso + is_map(State) andalso (Status == stopped orelse Status == idle orelse + Status == active orelse Status == overload) -> + imem_meta:write( + ?SERVICEDYN_TABLE, + #dperlServiceDyn{name = ServiceName, state = State, + status = Status, statusTime = imem_meta:time_uid()}). + +update_job_dyn(JobName, State) when is_binary(JobName) andalso is_map(State) -> + imem_meta:transaction(fun() -> + case imem_meta:read(?JOBDYN_TABLE, JobName) of + [] -> + ok = imem_meta:write( + ?JOBDYN_TABLE, + #dperlNodeJobDyn{name = JobName, state = State, + status = synced, + statusTime = imem_meta:time_uid()}); + [#dperlNodeJobDyn{state=OldState} = J] -> + NewState = maps:merge(OldState, State), + if OldState /= NewState -> + ok = imem_meta:write( + ?JOBDYN_TABLE, + J#dperlNodeJobDyn{state = NewState, + statusTime = imem_meta:time_uid()}); + true -> ok + end + end end); +update_job_dyn(JobName, Status) + when is_binary(JobName) andalso + (Status == synced orelse + Status == cleaning orelse Status == cleaned orelse + Status == refreshing orelse Status == refreshed orelse + Status == idle orelse Status == error orelse Status == stopped) -> + case imem_meta:read(?JOBDYN_TABLE, JobName) of + [] -> + ok = imem_meta:write( + ?JOBDYN_TABLE, + #dperlNodeJobDyn{name = JobName, state = #{}, + status = Status, + statusTime = imem_meta:time_uid()}); + [#dperlNodeJobDyn{status = OldStatus} = J] -> + if OldStatus /= Status orelse + (OldStatus == Status andalso Status == error) -> + ok = imem_meta:write( + ?JOBDYN_TABLE, + J#dperlNodeJobDyn{status = Status, + statusTime = imem_meta:time_uid()}); + true -> ok + end + end. 
+ +update_job_dyn(JobName, State, Status) + when is_binary(JobName) andalso is_map(State) andalso + (Status == synced orelse Status == undefined orelse + Status == cleaning orelse Status == cleaned orelse + Status == refreshing orelse Status == refreshed orelse + Status == idle orelse Status == error orelse Status == stopped) -> + case imem_meta:read(?JOBDYN_TABLE, JobName) of + [] -> + ok = imem_meta:write( + ?JOBDYN_TABLE, + #dperlNodeJobDyn{name = JobName, state = State, + status = Status, + statusTime = imem_meta:time_uid()}); + [#dperlNodeJobDyn{state=OldState, status=OldStatus, statusTime = OTime} = J] -> + NewState = maps:merge(OldState,State), + TimeDiff = imem_datatype:sec_diff(OTime), + if NewState /= OldState orelse (OldStatus == error andalso Status /= idle) + orelse (OldStatus /= error andalso Status /= OldStatus) + orelse (OldStatus == Status andalso Status == idle) + orelse TimeDiff > 1 -> + ok = imem_meta:write( + ?JOBDYN_TABLE, + J#dperlNodeJobDyn{state = NewState, status = Status, + statusTime = imem_meta:time_uid()}); + true -> ok + end + end. + +get_last_state(JobName) when is_binary(JobName) -> + case imem_meta:read(?JOBDYN_TABLE, {JobName, node()}) of + [#dperlNodeJobDyn{state = State}] -> + State; + _ -> #{} + end. + +get_last_audit_time(JobName) -> + case get_last_state(JobName) of + #{lastAuditTime := LastAuditTime} -> + LastAuditTime; + _ -> {0,0,0} + end. + +count_sibling_jobs(Module, Channel) -> + {Args, true} = imem_meta:select(dperlJob, [{#dperlJob{module=Module, + srcArgs='$1', dstArgs = '$2', _= '_'}, [], [{{'$1', '$2'}}]}]), + length(lists:filter(fun({#{channel := Chn}, _}) when Chn == Channel -> true; + ({_, #{channel := Chn}}) when Chn == Channel -> true; + (_) -> false + end, Args)). 
+ +connect_imem_link(ActiveLink, Links, User, Password) when is_list(Password) -> + connect_imem_link(ActiveLink, Links, User, list_to_binary(Password)); +connect_imem_link(ActiveLink, Links, User, Password) when is_binary(Password) -> + Link = lists:nth(ActiveLink, Links), + case connect_imem(User, erlang:md5(Password), Link) of + {ok, Session, Pid} -> + {ok, Session, Pid}; + {error, Error} -> + NewActiveLink = if + length(Links) > ActiveLink -> + ActiveLink + 1; + true -> + 1 + end, + {Error, NewActiveLink} + end. + +connect_imem(User, Password, #{ip := Ip, port := Port, secure := Secure, + schema := Schema}) -> + case erlimem:open({tcp, Ip, Port, if Secure -> [ssl]; true -> [] end}, + Schema) of + {ok, {erlimem_session, Pid} = Session} -> + case catch Session:auth(?MODULE,<<"TODO">>,{pwdmd5,{User,Password}}) of + OK when OK == ok; element(1, OK) == ok -> + case catch Session:run_cmd(login,[]) of + {error, _} -> + {error, eaccess}; + _ -> + {ok, Session, Pid} + end; + {'EXIT', _} -> + {error, eaccess}; + _ -> + {error, eaccess} + end; + Error -> + {error, Error} + end. + +data_nodes() -> data_nodes(imem_meta:data_nodes(), []). +data_nodes([], Acc) -> Acc; +data_nodes([{_,Node}|DataNodes], Acc) -> data_nodes(DataNodes, [Node|Acc]). + +job_state(Name) -> imem_meta:read(?JOBDYN_TABLE, Name). 
+ +report_status(Module, StatusTable, {Channel, ShortId}, JobName, Status) when is_binary(Channel) -> + report_status(Module, StatusTable, {binary_to_list(Channel), ShortId}, JobName, Status); +report_status(Module, StatusTable, {Channel, ShortId}, JobName, Status) when is_binary(ShortId) -> + report_status(Module, StatusTable, {Channel, binary_to_list(ShortId)}, JobName, Status); +report_status(Module, StatusTable, {Channel, ShortId}, JobName, Status) when is_binary(JobName) -> + report_status(Module, StatusTable, {Channel, ShortId}, binary_to_list(JobName), Status); +report_status(Module, StatusTable, {Channel, ShortId}, JobName, Status) when is_integer(ShortId) -> + report_status(Module, StatusTable, {Channel, integer_to_list(ShortId)}, JobName, Status); +report_status(Module, StatusTable, {Channel, ShortId}, JobName, Status) when is_list(StatusTable) -> + report_status(Module, list_to_binary(StatusTable), {Channel, ShortId}, JobName, Status); +report_status(Module, StatusTable, {Channel, ShortId}, JobName, Status) -> + try + write_channel(StatusTable, + [atom_to_list(Module), Channel, ShortId, JobName], Status) + catch + C:E -> + ?Error("~p,~p to ~p : ~p", + [Channel, ShortId, StatusTable, {C,E}], ?ST) + end. 
+ +%% pusher report_status +report_status(StatusKey, AuditTimeOrKey, no_op, StatusTable, Channel, JobName, Module) -> + report_status(StatusKey, AuditTimeOrKey, #{}, StatusTable, Channel, JobName, Module); +report_status(StatusKey, AuditTimeOrKey, {error, Error}, StatusTable, Channel, JobName, Module) -> + report_status(StatusKey, AuditTimeOrKey, error_obj(Error), StatusTable, Channel, JobName, Module); +report_status(StatusKey, AuditTimeOrKey, Error, StatusTable, Channel, JobName, Module) when not is_map(Error) -> + report_status(StatusKey, AuditTimeOrKey, error_obj(Error), StatusTable, Channel, JobName, Module); +report_status(StatusKey, AuditTimeOrKey, Status, StatusTable, Channel, JobName, Module) when is_map(Status) -> + AuditTime = + case AuditTimeOrKey of + {at, ATime} -> + ATime; + Key -> + get_key_audit_time(Channel, Key) + end, + report_status(Module, StatusTable, {Channel, StatusKey}, JobName, status_obj(AuditTime, Status)). + +worker_error(service, _Type, _Operation, _Msg) -> no_op; +worker_error(job, Type, Operation, Msg) -> + job_error(Type, Operation, Msg). + +job_error(s, Operation, Msg) -> job_error(<<"sync">>, Operation, Msg); +job_error(c, Operation, Msg) -> job_error(<<"cleanup">>, Operation, Msg); +job_error(r, Operation, Msg) -> job_error(<<"refresh">>, Operation, Msg); +job_error(i, Operation, Msg) -> job_error(<<"idle">>, Operation, Msg); +job_error(f, Operation, Msg) -> job_error(<<"finish">>, Operation, Msg); +job_error(undefined, Operation, Msg) -> job_error(<<"undefined">>, Operation, Msg); +job_error(Type, Operation, Msg) -> + job_error(undefined, Type, Operation, Msg). 
+ +job_error(Key, Type, Operation, Msg) when is_binary(Type), is_binary(Operation)-> + JobName = binary_to_list(get(name)), + ErKey = if Key == undefined -> [JobName, ?NODE]; + true -> [JobName, ?NODE, Key] + end, + case read_channel(?JOB_ERROR, ErKey) of + ?NOT_FOUND -> + Value = #{<<"TYPE">> => Type, + <<"OPERATION">> => Operation, + <<"TIMESTAMP">> => imem_datatype:timestamp_to_io(imem_meta:time()), + <<"MESSAGE">> => + if + is_binary(Msg) -> + case io_lib:printable_list(binary_to_list(Msg)) of + true -> Msg; + false -> list_to_binary(io_lib:format("~p", [Msg])) + end; + is_list(Msg) -> + case io_lib:printable_list(Msg) of + true -> list_to_binary(Msg); + false -> list_to_binary(lists:flatten(io_lib:format("~p", [Msg]))) + end; + true -> + list_to_binary(lists:flatten(io_lib:format("~p", [Msg]))) + end}, + write_channel(?JOB_ERROR, ErKey, imem_json:encode(Value)); + _ -> no_op + end. + +job_error_close() -> job_error_close(undefined). + +job_error_close(undefined) -> + remove_from_channel(?JOB_ERROR, [binary_to_list(get(name)), ?NODE]); +job_error_close(Key) -> + remove_from_channel(?JOB_ERROR, [binary_to_list(get(name)), ?NODE, Key]). + +all_keys(Channel) when is_list(Channel) -> + all_keys(list_to_existing_atom(Channel)); +all_keys(Channel) when is_binary(Channel) -> + all_keys(binary_to_existing_atom(Channel, utf8)); +all_keys(Channel) when is_atom(Channel) -> + lists:map( + fun(K) -> sext:decode(K) end, + imem_meta:return_atomic( + imem_meta:transaction(fun mnesia:all_keys/1, [Channel])) + ). + +time_str(Time) -> + case Time div 1000 of + TimeMs when TimeMs > 1000 -> + integer_to_list(TimeMs div 1000)++"s"; + TimeMs -> + integer_to_list(TimeMs)++"ms" + end. + +-spec ts_str(ddTimestamp() | ddTimeUID()) -> binary(). +ts_str(TimeStamp) -> imem_datatype:timestamp_to_io(TimeStamp). + +-spec safe_json_map(binary()) -> map(). 
+
+%% Decode a JSON binary into a map, retrying with a latin1->utf8
+%% re-encoding when the first decode crashes (handles mis-encoded input).
+%% NOTE(review): despite the -spec, this can also return null or a list
+%% when the JSON document is not an object.
+safe_json_map(Value) when is_binary(Value) ->
+    case catch imem_json:decode(Value, [return_maps]) of
+        {'EXIT', _} ->
+            imem_json:decode(
+                unicode:characters_to_binary(Value, latin1, utf8),
+                [return_maps]);
+        null -> null;
+        DecodedValue when is_map(DecodedValue) -> DecodedValue;
+        DecodedValue when is_list(DecodedValue) -> DecodedValue
+    end.
+
+%% Canonicalize a map for order-insensitive comparison: list values are
+%% sorted, nested maps are normalized recursively; non-maps pass through.
+-spec normalize_map(map()) -> map().
+normalize_map(Map) when is_map(Map)->
+    maps:map(
+        fun(_, V) when is_list(V) -> lists:sort(V);
+           (_, V) when is_map(V) -> normalize_map(V);
+           (_, V) -> V
+        end, Map);
+normalize_map(M) -> M.
+
+%% Guard writes to a protected key. When Config does not mention "dperl"
+%% (case-insensitive regex) the channel is treated as a target and the key
+%% is nulled if absent or still carrying an AuditTime; otherwise the write
+%% is delegated to read_check_write/4, but only on the same platform.
+%% NOTE(review): exact protection semantics depend on read_check_write,
+%% defined elsewhere — confirm before relying on this description.
+write_protected(Channel, Key, Prov, IsSamePlatform, Config, Type) ->
+    case re:run(Config, <<"(?i)dperl">>) of
+        nomatch ->
+            %% protectedConfig is target
+            case read_channel(Channel, Key) of
+                ?NOT_FOUND -> write_channel(Channel, Key, <<"null">>);
+                #{<<"AuditTime">> := _} -> write_channel(Channel, Key, <<"null">>);
+                _ -> no_op
+            end;
+        _ -> if IsSamePlatform == true -> read_check_write(Channel, Key, Prov, Type);
+                true -> no_op
+             end
+    end.
+
+%% Structural diff of two maps with identical key sets: returns
+%% #{Key => #{local => L, remote => R}} for differing values. With
+%% differing key sets, returns the two missing-key lists instead.
+%% NOTE(review): 'localMissinKeys' is a typo for 'localMissingKeys' but is
+%% part of the returned contract — fix only together with all consumers.
+maps_diff(Src, Dst) when is_map(Src), is_map(Dst) ->
+    DstKeys = maps:keys(Dst),
+    SrcKeys = maps:keys(Src),
+    if SrcKeys == DstKeys ->
+            lists:foldl(
+                fun(K, M) ->
+                    L = maps:get(K, Src, '$missing'),
+                    R = maps:get(K, Dst, '$missing'),
+                    if L /= R ->
+                            M#{K => #{local => L, remote => R}};
+                       true -> M
+                    end
+                end, #{}, DstKeys);
+       true ->
+            #{localMissinKeys => SrcKeys -- DstKeys, remoteMissingKeys => DstKeys -- SrcKeys}
+    end.
+
+%% Convert a JSON-decoded key (or a raw JSON binary) into the internal key
+%% form: binaries become unicode strings, other terms pass through.
+key_from_json([]) -> [];
+key_from_json([B | Key]) when is_binary(B) ->
+    [unicode:characters_to_list(B) | key_from_json(Key)];
+key_from_json([T | Key]) -> [T | key_from_json(Key)];
+key_from_json(KeyJson) when is_binary(KeyJson) ->
+    key_from_json(imem_json:decode(KeyJson)).
+
+-spec key_to_json(integer() | [list() | binary()]) -> [binary()] | binary().
+
+%% Inverse of key_from_json/1: integers become binaries, string elements
+%% become utf8 binaries, everything else passes through.
+key_to_json(Key) when is_integer(Key) -> integer_to_binary(Key);
+key_to_json([]) -> [];
+key_to_json([L | Key]) when is_list(L) ->
+    [unicode:characters_to_binary(L) | key_to_json(Key)];
+key_to_json([K | Key]) ->
+    [K | key_to_json(Key)].
+
+%% Like key_to_json/1 but JSON-encodes list keys into a single binary.
+-spec key_to_json_enc(integer() | [list() | binary()]) -> binary().
+key_to_json_enc(Key) when is_integer(Key) -> integer_to_binary(Key);
+key_to_json_enc(Key) when is_list(Key) ->
+    imem_json:encode(key_to_json(Key)).
+
+%% Build erloci options: always injects {logfun, LogFun}, guarantees a
+%% pstate carrying the job name, and defaults logging to true unless the
+%% caller already set it in ociOpts.
+oci_opts(LogFun, Opts) ->
+    PState = {pstate, #{jname => get(jname)}},
+    [{logfun, LogFun} |
+     case proplists:get_value(ociOpts, Opts, '$none') of
+         '$none' ->
+             [{ociOpts, [{logging, true}, PState]} | Opts];
+         OciOpts ->
+             case proplists:get_value(logging, OciOpts, '$none') of
+                 '$none' ->
+                     [{ociOpts, [{logging, true}, PState | OciOpts]} | Opts];
+                 _ ->
+                     [{ociOpts, [PState | OciOpts]} | Opts]
+             end
+     end].
+
+%% Fetch up to Limit rows from an erloci statement, looping until the
+%% driver signals completion (second tuple element true) or Limit rows
+%% have accumulated.
+oci_fetch_rows(Stmt, Limit) ->
+    case Stmt:fetch_rows(Limit) of
+        {{rows, []}, _} -> [];
+        {{rows, Rows}, true} -> Rows;
+        {{rows, Rows}, false} when length(Rows) >= Limit -> Rows;
+        {{rows, Rows}, false} when length(Rows) < Limit ->
+            Rows ++ oci_fetch_rows(Stmt, Limit - length(Rows))
+    end.
+
+%% Keep only links flagged use == true and order them by ascending prio.
+sort_links(Links) ->
+    lists:sort(fun(#{prio := A}, #{prio := B}) -> A < B end,
+               [L || #{use := true} = L <- Links]).
+
+%% Pool name for a job: dstArgs poolName if present, else the job name.
+-spec get_pool_name(tuple()) -> atom().
+get_pool_name(#dperlJob{name = Name, dstArgs = DstArgs}) ->
+    to_atom(maps:get(poolName, DstArgs, Name)).
+
+%% Pool name from an args map with a fallback default.
+-spec get_pool_name(map(), term()) -> atom().
+get_pool_name(#{poolName := PoolName}, _Default) -> to_atom(PoolName);
+get_pool_name(_, Default) -> to_atom(Default).
+
+%% Coerce binary/list/atom to atom (creates new atoms for unseen names).
+-spec to_atom(binary() | list() | atom()) -> atom().
+to_atom(Bin) when is_binary(Bin) -> binary_to_atom(Bin, utf8);
+to_atom(List) when is_list(List) -> list_to_atom(List);
+to_atom(Atom) when is_atom(Atom) -> Atom.
+
+-spec to_binary(binary() | list() | atom()) -> binary().
+
+%% Coerce list/atom/binary to a binary.
+to_binary(List) when is_list(List) ->
+    list_to_binary(List);
+to_binary(Atom) when is_atom(Atom) ->
+    atom_to_binary(Atom, utf8);
+to_binary(Bin) when is_binary(Bin) ->
+    Bin.
+
+%% Execute an imem_dal_skvh function on a remote session. Known error
+%% shapes are logged and rethrown (throw); any other tuple is treated as
+%% an error, plain results are returned as-is.
+-spec remote_dal(tuple(), atom(), list()) -> term().
+remote_dal(Session, Fun, Args) ->
+    case catch Session:run_cmd(dal_exec, [imem_dal_skvh, Fun, Args]) of
+        {ok, Result} -> {ok, Result};
+        {'EXIT', Error} ->
+            ?JError("imem_dal_skvh:~p(~p). Error : ~p", [Fun, Args, Error]),
+            throw(Error);
+        {error, {{_, Error}, _}} ->
+            ?JError("imem_dal_skvh:~p(~p). Error : ~p", [Fun, Args, Error]),
+            throw(element(1, Error));
+        Error when is_tuple(Error) ->
+            ?JError("imem_dal_skvh:~p(~p). Error : ~p", [Fun, Args, Error]),
+            throw(Error);
+        Result -> Result
+    end.
+
+%% Execute a prepared erloci statement with Params and fetch up to Limit
+%% rows; returns the row list, or {error, Reason} on any failure.
+-spec run_oci_stmt(tuple(), tuple(), integer()) -> list() | tuple().
+run_oci_stmt(Stmt, Params, Limit) ->
+    case catch Stmt:exec_stmt([Params]) of
+        {cols, _} ->
+            dperl_dal:oci_fetch_rows(Stmt, Limit);
+        Error ->
+            ?JError("Error processing result: ~p", [Error]),
+            {error, Error}
+    end.
+
+%% Ensure a single globally registered activity-logger process named Name
+%% exists; it periodically writes host/time/Extra to the status file (see
+%% log_activity/5). Idempotent: a second call just logs the running pid.
+-spec activity_logger(map(), list(), list()) -> ok.
+activity_logger(StatusCtx, Name, Extra) ->
+    case global:whereis_name(Name) of
+        undefined ->
+            SInterval = ?STATUS_INTERVAL,
+            SFile = ?STATUS_FILE,
+            {ok, HostName} = inet:gethostname(),
+            Pid = spawn_link(
+                    fun() ->
+                        log_activity(StatusCtx, HostName, SFile, Extra, SInterval)
+                    end),
+            global:register_name(Name, Pid),
+            ?JInfo("activity logger started for ~p ~p", [Name, Pid]);
+        Pid ->
+            ?JDebug("activity logger already running ~p", [Pid])
+    end.
+
+%%------------------------------------------------------------------------------
+%% private
+%%------------------------------------------------------------------------------
+
+%% Wrap any error term into the #{error => binary()} status shape.
+-spec error_obj(term()) -> #{error => binary()}.
+error_obj(Error) when is_binary(Error) ->
+    #{error => Error};
+error_obj(Error) when is_list(Error) ->
+    error_obj(list_to_binary(Error));
+error_obj(Error) ->
+    error_obj(imem_datatype:term_to_io(Error)).
+
+%% Build the status object stored per job: the given Status map extended
+%% with an auditTime field (tuple timestamps are flattened to a list so the
+%% value is JSON-friendly).
+-spec status_obj(list() | tuple() | undefined, map()) -> map().
+status_obj(AuditTime, Status) when is_tuple(AuditTime) ->
+    status_obj(tuple_to_list(AuditTime), Status);
+status_obj(AuditTime, Status) when is_map(Status) ->
+    Status#{auditTime => AuditTime}.
+
+%% AuditTime recorded for Key on Channel, or undefined when the key is
+%% missing or the read crashes (read_channel may throw; hence the catch).
+-spec get_key_audit_time(binary(), term()) -> ddTimestamp() | undefined.
+get_key_audit_time(Channel, Key) ->
+    case catch read_channel(Channel, Key) of
+        #{<<"AuditTime">> := AuditTime} ->
+            AuditTime;
+        _ ->
+            undefined
+    end.
+
+%% Activity-logger loop (spawned by activity_logger/3): every SInterval ms
+%% write "HostName\nYYYY-MM-DD HH:MM:SS\nExtra\n" to SFile via imem_file,
+%% then recurse. The timestamp fields are picked out of the textual
+%% timestamp returned by imem_datatype:timestamp_to_io/1.
+%% NOTE(review): the binary match pattern below was garbled to "<>" in the
+%% patch text; reconstructed from the Y/M/D/H/Mi/S variables used in Time.
+log_activity(Ctx, HostName, SFile, Extra, SInterval) ->
+    erlang:send_after(SInterval, self(), write_status),
+    receive
+        write_status ->
+            <<Y:4/binary, "-", M:2/binary, "-", D:2/binary, " ", H:2/binary,
+              ":", Mi:2/binary, ":", S:2/binary, _/binary>> =
+                imem_datatype:timestamp_to_io(imem_meta:time()),
+            % timestamp format 2019-01-10 17:43:01
+            Time = [Y, "-", M, "-", D, " ", H, ":", Mi, ":", S],
+            Data = [HostName, "\n", Time, "\n", Extra, "\n"],
+            imem_file:write_file(Ctx, SFile, Data, 3000),
+            log_activity(Ctx, HostName, SFile, Extra, SInterval)
+    end.
+
+%-------------------------------------------------------------------------------
+% TESTS
+%-------------------------------------------------------------------------------
+
+-ifdef(TEST).
+
+-include_lib("eunit/include/eunit.hrl").
+
+all_test_() ->
+    {inparallel,
+     [ {"key from json", ?_assertEqual(["test", 1234], key_from_json(<<"[\"test\", 1234]">>))}
+     , {"key to json", ?_assertEqual([<<"test">>, 1234], key_to_json(["test", 1234]))}
+     , {"key to json int", ?_assertEqual(<<"0">>, key_to_json(0))}
+     , {"key to json enc", ?_assertEqual(<<"[\"test\",1234]">>, key_to_json_enc(["test", 1234]))}
+     , {"key to json enc int", ?_assertEqual(<<"0">>, key_to_json_enc(0))}
+     ]
+    }.
+
+-endif.
diff --git a/src/dperl_metrics.erl b/src/dperl_metrics.erl
new file mode 100644
index 00000000..4fae5edc
--- /dev/null
+++ b/src/dperl_metrics.erl
@@ -0,0 +1,396 @@
+-module(dperl_metrics).
+
+-include("dperl.hrl").
+-include("dperl_status.hrl").
+
+-behaviour(imem_gen_metrics).
+ +-export([start_link/0,get_metric/1,get_metric/2,request_metric/3,get_metric_direct/1]). + +-export([init/0,handle_metric_req/3,request_metric/1, terminate/2]). + +-export([mbsKey/2,ramsKey/2,mproKey/2,intKey/2]). + +-safe([get_metric/1, get_metric_direct/1]). + +-spec start_link() -> {ok, pid()} | {error, term()}. +start_link() -> + imem_gen_metrics:start_link(?MODULE). + +-spec get_metric(term()) -> term(). +get_metric(MetricKey) -> + imem_gen_metrics:get_metric(?MODULE, MetricKey). + +-spec get_metric(term(), integer()) -> term(). +get_metric(MetricKey, Timeout) -> + imem_gen_metrics:get_metric(?MODULE, MetricKey, Timeout). + +-spec request_metric(term(), term(), pid()) -> term(). +request_metric(MetricKey, ReqRef, ReplyTo) -> + imem_gen_metrics:request_metric(?MODULE, MetricKey, ReqRef, ReplyTo). + +-spec get_metric_direct(term()) -> term(). +get_metric_direct(job_down_count) -> + Exclusions = lists:usort(?JOB_DOWN_NAME_EXCLUSIONS ++ modules_to_jobs(?JOB_DOWN_MOD_EXCLUSIONS)), + job_down_count(Exclusions); +get_metric_direct(job_error_count) -> + Exclusions = lists:usort(?JOB_ERROR_NAME_EXCLUSIONS ++ modules_to_jobs(?JOB_ERROR_MOD_EXCLUSIONS)), + job_error_count(Exclusions); +get_metric_direct(UnknownMetric) -> + ?Error("Unknown metric requested direct ~p", [UnknownMetric]), + undefined. + +-spec request_metric(term()) -> noreply | {ok, term()}. +request_metric({focus, "shortid", ShortIdStr}) -> + case catch list_to_integer(ShortIdStr) of + {'EXIT', _Error} -> {ok, {error, user_input}}; + _ -> noreply + end; +request_metric(_) -> noreply. + +%% imem_gen_metrics callback +init() -> {ok, undefined}. 
+ +handle_metric_req({jobs, ExcludedJobs}, ReplyFun, State) -> + ReplyFun(process_jobs(imem_meta:dirty_read(dperlJob), ExcludedJobs)), + State; +handle_metric_req({job_status, ExcludedJobs}, ReplyFun, State) -> + ReplyFun(process_status(imem_meta:dirty_read(?JOBDYN_TABLE), ExcludedJobs)), + State; +handle_metric_req({errors, ExcludedJobs}, ReplyFun, State) -> + ReplyFun(process_errors(dperl_dal:read_gt(?JOB_ERROR, {}, 1000), ExcludedJobs)), + State; +handle_metric_req({job_down_count, ExcludedJobs}, ReplyFun, State) -> + JobDownCount = job_down_count([list_to_bin(J) || J <- get_ignored_jobs(ExcludedJobs)]), + ReplyFun(JobDownCount), + State; +handle_metric_req({job_error_count, ExcludedJobs}, ReplyFun, State) -> + JobErrorCount = job_error_count([list_to_bin(J) || J <- get_ignored_jobs(ExcludedJobs)]), + ReplyFun(JobErrorCount), + State; +handle_metric_req({focus, "shortid", ShortId}, ReplyFun, State) when is_list(ShortId) -> + Result = maps:fold( + fun(Chn, Fun, Acc) when is_list(Chn), is_atom(Fun) -> + case not erlang:function_exported(?MODULE, Fun, 2) orelse + ?MODULE:Fun("shortid", ShortId) of + true -> Acc; + {API, Keys} -> + case catch dperl_dal:API(Chn, Keys) of + Values when is_list(Values) -> + [#{ckey => + ["focus", "cluster", ["shortid", ShortId], + [Chn, + if is_list(CKey) -> + [if is_atom(CK) -> atom_to_list(CK); + true -> CK + end || CK <- CKey]; + true -> CKey + end]], + cvalue => CVal} + || #{ckey := CKey, cvalue := CVal} <- Values] ++ Acc; + _ -> Acc + end; + nomatch -> Acc; + _Error -> Acc + end; + (_, _, Acc) -> Acc + end, [], ?GET_FOCUS("shortid")), + ReplyFun(Result), + State; +handle_metric_req({focus, "shortid", _ShortId}, ReplyFun, State) -> + ?Warn("ShortId has to be a list"), + ReplyFun([]), + State; +%% aggregator metrics +handle_metric_req({agr, AgrMetric, Channel}, ReplyFun, State) when Channel == [] -> + ?JError("Channel cannot be empty for aggregator metric : ~p", [AgrMetric]), + ReplyFun(undefined), + State; +handle_metric_req({agr, 
AgrMetric, Channel}, ReplyFun, State) when is_list(Channel) -> + handle_metric_req({agr, AgrMetric, list_to_bin(Channel)}, ReplyFun, State); +handle_metric_req({agr, AgrMetric, Channel}, ReplyFun, State) when is_binary(Channel) -> + ReplyFun(fetch_metric(AgrMetric, Channel)), + State; +handle_metric_req(UnknownMetric, ReplyFun, State) -> + ?Error("Unknown metric requested ~p when state ~p", [UnknownMetric, State]), + ReplyFun({error, unknown_metric}), + State. + +terminate(_Reason, _State) -> ok. + +%% Helper functions +mbsKey("shortid", [I|_] = ShortId) when I >= $0, I =< $9 -> + {read_siblings, [[ShortId,[]]]}; +mbsKey("shortid", SubKey) -> + case re:split(SubKey, "-", [{return, list}]) of + [_,_,[I|_] = ShortId|_] when I >= $0, I =< $9 -> + {read_channel_raw, [[ShortId,SubKey]]}; + _ -> nomatch + end. + +ramsKey("shortid", [I|_] = ShortId) when I >= $0, I =< $9 -> + {read_siblings, [[ShortId,[]]]}. + +intKey("shortid", ShortIdStr) when is_list(ShortIdStr) -> + case catch list_to_integer(ShortIdStr) of + ShortId when is_integer(ShortId) -> + {read_channel_raw, [ShortId]}; + _ -> nomatch + end. + +mproKey("shortid", ShortIdStr) when is_list(ShortIdStr) -> + case catch list_to_integer(ShortIdStr) of + ShortId when is_integer(ShortId) -> + {read_channel_raw, [[P,ShortId] || P <- [smpp, tpi]]}; + _ -> nomatch + end. + +-spec process_jobs([#dperlJob{}], list()) -> [map()]. +process_jobs(Jobs, ExcludedJobs) -> + process_jobs(Jobs, get_ignored_jobs(ExcludedJobs), ?JOB_DESCRIPTIONS). + +-spec process_jobs([#dperlJob{}], list(), map()) -> [map()]. 
+process_jobs([], _, _) -> []; +process_jobs([#dperlJob{name = Name, module = Module, args = Args, + srcArgs = SrcArgs, dstArgs = DstArgs, running = Running, + enabled = Enabled, plan = Plan, nodes = Nodes, + opts = Opts} | Rest], IgnoredJobs, JobDescriptions) -> + {Channel, Direction} = case DstArgs of + #{channel := DstChannel} -> {DstChannel, pull}; + _ -> case SrcArgs of + #{channel := SrcChannel} -> {SrcChannel, push}; + _ -> {"none", push} + end + end, + case is_ignored_job(Name, IgnoredJobs) of + true -> process_jobs(Rest, IgnoredJobs, JobDescriptions); + false -> + JobName = bin_to_list(Name), + Label = case Args of + #{label := L} when is_list(L) -> list_to_bin(L); + #{label := L} when is_binary(L) -> L; + _ -> <<>> + end, + Desc = maps:get(Label, JobDescriptions, <<>>), + Value = #{module => Module, args => to_json(Args), srcArgs => to_json(SrcArgs), + dstArgs => to_json(DstArgs), enabled => Enabled, plan => Plan, + nodes => Nodes, opts => Opts, channel => list_to_bin(Channel), + direction => Direction, running => Running, platform => Label, + desc => Desc}, + [{JobName, Value} | process_jobs(Rest, IgnoredJobs, JobDescriptions)] + end. + +-spec process_status([#dperlNodeJobDyn{}], list()) -> [map()]. +process_status(Statuses, ExcludedJobs) -> + process_status(Statuses, get_ignored_jobs(ExcludedJobs), []). + +-spec process_status([#dperlNodeJobDyn{}], list(), list()) -> [map()]. +process_status([], _, Acc) -> Acc; +process_status([#dperlNodeJobDyn{name = Name, state = State, statusTime = StatusTime, + status = Status} | Rest], IgnoredJobs, Acc) -> + case is_ignored_job(Name, IgnoredJobs) of + false -> + case is_status_timed_out(StatusTime) of + false -> + JobName = bin_to_list(Name), + Value = #{status => Status, + state => to_json(State)}, + process_status(Rest, IgnoredJobs, [{JobName, Value} | Acc]); + true -> process_status(Rest, IgnoredJobs, Acc) + end; + true -> process_status(Rest, IgnoredJobs, Acc) + end. 
+ +-spec process_errors([{term(), map()}], list()) -> [map()]. +process_errors(Errors, ExcludedJobs) -> process_errors(Errors, get_ignored_jobs(ExcludedJobs), []). + +-spec process_errors([{term(), map()}], list(), list()) -> [map()]. +process_errors([], _, Acc) -> Acc; +process_errors([{ErrorKey, Value} | Rest], IgnoredJobs, Acc) -> + %% TODO: What to do when ErrorKey doesn't match ?... + NewAcc = + case ErrorKey of + [Job, Node] -> + case is_ignored_job(Job, IgnoredJobs) of + false -> + [{[Job, Node], Value} | Acc]; + true -> Acc + end; + [Job, Node, ErrorId] -> + case is_ignored_job(Job, IgnoredJobs) of + false -> + Key = [Job, Node, ensure_json(ErrorId)], + [{Key, Value} | Acc]; + true -> Acc + end; + InvalidErrorKey -> + ?Error("Invalid error key processing errors: ~p", [InvalidErrorKey]), + Acc + end, + process_errors(Rest, IgnoredJobs, NewAcc). + +-spec job_error_count(list()) -> integer(). +job_error_count(NameExclusions) -> + case imem_meta:select(?JOBDYN_TABLE, [{#dperlNodeJobDyn{status = error, + statusTime = '$1', name ='$2', _ = '_'}, [], [{{'$1', '$2'}}]}]) of + {[], true} -> 0; + {Infos, true} -> + lists:foldl(fun({StatusTime, Name}, Acc) -> + case is_status_timed_out(StatusTime) of + true -> Acc; + false -> + case lists:member(Name, NameExclusions) of + false -> Acc + 1; + true -> Acc + end + end + end, 0, Infos) + end. + +-spec job_down_count(list()) -> integer(). +job_down_count(NameExclusions) -> + case imem_meta:select(dperlJob, [{#dperlJob{name ='$1', plan = '$2', nodes = '$3', _ = '_'}, [], [{{'$1', '$2', '$3'}}]}]) of + {[], true} -> 0; + {JobPlans, true} -> + lists:foldl( + fun({JobName, Plan, Nodes}, Acc) -> + case not lists:member(JobName, NameExclusions) of + true -> case job_down_count_i(JobName, Plan, Nodes) of + true -> Acc + 1; + false -> Acc + end; + false -> Acc + end + end, 0, JobPlans) + end. + +-spec job_down_condition(binary()) -> boolean(). 
+
+%% Enabled flag of a job, false when the job row does not exist.
+job_down_condition(JobName) when is_binary(JobName) ->
+    case imem_meta:select(dperlJob, [{#dperlJob{enabled = '$1', name = JobName, _ = '_'}, [], ['$1']}]) of
+        {[], true} -> false;
+        {[IsEnabled], true} -> IsEnabled
+    end.
+
+%% Whether a job counts towards the down total on this node, depending on
+%% its plan: no nodes -> purely by enabled flag; on_all_nodes -> only when
+%% this node is in the list; otherwise only on the head (primary) node.
+%% NOTE(review): a disabled job is counted as "down" here — confirm this
+%% is the intended alerting semantics.
+job_down_count_i(JobName, _Plan, []) -> not job_down_condition(JobName);
+job_down_count_i(JobName, on_all_nodes, Nodes) ->
+    (not job_down_condition(JobName)) andalso lists:member(node(), Nodes);
+job_down_count_i(JobName, _Plan, Nodes) ->
+    (not job_down_condition(JobName)) andalso node() == hd(Nodes).
+
+%% Best-effort conversion of a config map into JSON-safe values: masks the
+%% password key, flattens tuples (timestamps become epoch seconds) and
+%% renders non-printable lists via ~p.
+%% NOTE(review): the first fun head matches only the ATOM key 'password';
+%% binary keys such as <<"password">> are NOT masked — verify callers.
+to_json(Map) when is_map(Map) ->
+    maps:map(fun(password, _V) -> <<"*****">>;
+                (_K, V) when is_map(V) -> to_json(V);
+                (_K, V) when is_tuple(V) ->
+                     case catch to_epoch(V) of
+                         {'EXIT', _} -> tuple_to_list(V);
+                         Epoch -> Epoch
+                     end;
+                (_K, V) when is_list(V) ->
+                     case catch format_list(V) of
+                         {'EXIT', _} -> iolist_to_binary(io_lib:format("~p", [V]));
+                         L -> L
+                     end;
+                (_K, V) -> V
+             end, Map);
+to_json(Term) -> Term.
+
+%% Printable lists become binaries; other lists are converted element-wise
+%% (maps via to_json, tuples/lists recursively, empty binaries to null).
+format_list(List) ->
+    case io_lib:printable_list(List) of
+        true -> list_to_bin(List);
+        false ->
+            lists:map(fun(L) when is_map(L) -> to_json(L);
+                         (L) when is_tuple(L) -> format_list(tuple_to_list(L));
+                         (L) when is_list(L) -> format_list(L);
+                         (<<>>) -> null;
+                         (L) -> L
+                      end, List)
+    end.
+
+%% Timestamp tuple to epoch seconds; {Tag, Timestamp} wrappers unwrap first.
+to_epoch({_, T}) -> to_epoch(T);
+to_epoch({Mega, Sec, _}) -> (Mega * 1000000) + Sec.
+
+%% Normalize a key component for JSON output (binaries/atoms to strings).
+ensure_json(Value) when is_binary(Value) -> bin_to_list(Value);
+ensure_json(Value) when is_atom(Value) -> atom_to_list(Value);
+ensure_json(Value) -> Value.
+
+%% Membership test against the ignored-jobs name list (names are lists).
+is_ignored_job(JobName, IgnoredJobs) when is_binary(JobName) ->
+    is_ignored_job(bin_to_list(JobName), IgnoredJobs);
+is_ignored_job(JobName, IgnoredJobs) when is_list(JobName) ->
+    lists:member(JobName, IgnoredJobs).
+
+%% Ignored jobs = configured ignore list ++ jobs that never ran
+%% (running == undefined) ++ the caller-supplied exclusions, deduplicated.
+get_ignored_jobs(ExcludedJobs) ->
+    {NeverRunJobs, true} = imem_meta:select(dperlJob, [{#dperlJob{running = undefined,
+                name = '$1', _ = '_'}, [], ['$1']}]),
+    lists:usort(?GET_IGNORED_JOBS_LIST ++ [bin_to_list(J) || J <- NeverRunJobs] ++ ExcludedJobs).
+
+%% True when a dperlNodeJobDyn status timestamp is older than the
+%% configured ?JOB_DYN_STATUS_TIMEOUT (microsecond comparison).
+is_status_timed_out(StatusTime) ->
+    imem_datatype:musec_diff(StatusTime) > ?JOB_DYN_STATUS_TIMEOUT.
+
+%% Resolve a list of job modules to the names of all configured jobs using
+%% one of those modules.
+modules_to_jobs([]) -> [];
+modules_to_jobs(Modules) ->
+    lists:foldl(
+        fun(Mod, Acc) ->
+            {Jobs, true} = imem_meta:select(dperlJob, [{#dperlJob{module = Mod, name = '$1', _ = '_'}, [], ['$1']}]),
+            Jobs ++ Acc
+        end, [], Modules).
+
+%% Aggregator metrics: each clause folds the status channel into a map
+%% keyed by node atom, picking rows whose key matches the expected
+%% "system_info" shape and extracting the relevant cvalue fields.
+fetch_metric(node_memory, Channel) ->
+    FoldFun =
+        fun(#{ckey := [_, _, _, "system_info", Node, "system_info", "node_status"],
+              cvalue := #{free_memory := FreeMemory, total_memory := TotalMemory}}, Acc) ->
+                Acc#{list_to_atom(Node) => #{free_memory => FreeMemory, total_memory => TotalMemory}};
+           (_, Acc) -> Acc
+        end,
+    fold_table(Channel, FoldFun);
+fetch_metric(error_count, Channel) ->
+    FoldFun =
+        fun(#{ckey := [_, _, _, "system_info", Node, "system_info", "job_error_count"],
+              cvalue := #{job_error_count := Count}}, Acc) ->
+                Acc#{list_to_atom(Node) => Count};
+           (_, Acc) -> Acc
+        end,
+    fold_table(Channel, FoldFun);
+fetch_metric(heartbeat, Channel) ->
+    FoldFun =
+        fun(#{ckey := [_, _, _, "system_info", Node, "system_info", "heartbeat"],
+              cvalue := #{time := Time}}, Acc) ->
+                Acc#{list_to_atom(Node) => Time};
+           (_, Acc) -> Acc
+        end,
+    fold_table(Channel, FoldFun);
+fetch_metric(node_error, Channel) ->
+    FoldFun =
+        fun(#{ckey := [_, _, _, "system_info", Node, "error", _Type], cvalue := #{error := Error}}, Acc) ->
+                Acc#{list_to_atom(Node) => Error};
+           (_, Acc) -> Acc
+        end,
+    fold_table(Channel, FoldFun).
+
+%% Fold FoldFun over every row of the skvh table behind Channel inside a
+%% single imem transaction; returns the accumulated map.
+%% NOTE(review): the atom_table_name argument was garbled to "<>" in the
+%% patch text; reconstructed as the channel binary (the calling clause
+%% guarantees Channel is a binary).
+fold_table(Channel, FoldFun) ->
+    TableName = imem_dal_skvh:atom_table_name(<<Channel/binary>>),
+    TransactionFun =
+        fun() ->
+            FirstKey = imem_meta:first(TableName),
+            fold_table(TableName, FirstKey, FoldFun, #{})
+        end,
+    case imem_meta:transaction(TransactionFun) of
+        {atomic, Result} -> Result;
+        ErrorResult ->
+            %% NOTE(review): on failure this returns the value of ?JError
+            %% (not a map); callers receive that via ReplyFun unchanged.
+            ?JError("Error fetching rows, result: ~p", [ErrorResult])
+    end.
+ +fold_table(_Table, '$end_of_table', _FoldFun, Acc) -> Acc; +fold_table(Table, CurKey, FoldFun, Acc) -> + [RawRow] = imem_meta:read(Table, CurKey), + RowMap = imem_dal_skvh:skvh_rec_to_map(RawRow), + NewAcc = FoldFun(RowMap, Acc), + NextKey = imem_meta:next(Table, CurKey), + fold_table(Table, NextKey, FoldFun, NewAcc). + +-spec bin_to_list(binary() | list()) -> list(). +bin_to_list(L) when is_list(L) -> L; +bin_to_list(B) -> binary_to_list(B). + +-spec list_to_bin(binary() | list()) -> binary(). +list_to_bin(B) when is_binary(B) -> B; +list_to_bin(L) -> list_to_binary(L). \ No newline at end of file diff --git a/src/dperl_ora.erl b/src/dperl_ora.erl new file mode 100644 index 00000000..0936d039 --- /dev/null +++ b/src/dperl_ora.erl @@ -0,0 +1,703 @@ +-module(dperl_ora). + +-include("dperl_ora.hrl"). + +-behavior(dperl_worker). +-behavior(cowboy_middleware). +-behavior(cowboy_loop). + +% dperl_worker exports +-export([init/1, terminate/2, handle_call/3, handle_cast/2, handle_info/2, + code_change/3, format_status/2, get_status/1, init_state/1]). + +-record(state, {name, resource, baseUrl, active_link = 1, stmt_profile_del, + stmt_ts_get, stmt_ts_post, stmt_ts_put, stmt_ts_reset, + stmt_ts_revert, stmt_bar_get, stmt_bar_put, stmt_bar_post, + pool, c_whitelist = #{}, listeners = [], tenants = #{}, port, + close_statement = [], close_session = [], dynstate = #{}}). + +% cowboy rest exports +-export([init/2, info/3, execute/2]). + +-define(API_VERSION, "1.0.0"). + +init_state(_) -> #state{}. + +get_status(#state{dynstate = DynState}) -> DynState. 
+ +init({#dperlService{name = SName, args = Args, resource = Resource, + interface = Interface}, + State}) -> + case init_resources( + Resource, + State#state{name = SName, resource = Resource, + dynstate = #{start => imem_meta:time(), req => 0, + error => #{}}}) of + {ok, State1} -> + Interface1 = maps:merge(Interface, Args), + case init_interface(Interface1, State1) of + {ok, State2} -> + erlang:send_after(?SERVICE_UPDATE_PERIOD(SName), self(), update_dyn), + erlang:send_after(?SERVICE_STATUS_RESET_PERIOD(SName), self(), reset_dyn), + dperl_dal:update_service_dyn( + State2#state.name, State2#state.dynstate, + ?SERVICE_ACTIVE_THRESHOLD(SName), ?SERVICE_OVERLOAD_THRESHOLD(SName)), + {ok, State2}; + Error -> + erlocipool:del(State1#state.pool), + {stop, Error} + end; + Error -> {stop, Error} + end; +init({Args, _}) -> + ?SError("bad start parameters ~p", [Args]), + {stop, badarg}. + +handle_call(Request, _From, State) -> + ?SWarn("Unsupported handle_call ~p", [Request]), + {reply, ok, State}. + +% common +handle_cast(#{operation := {profile, delete}, reply := RespPid, + msisdn := Msisdn, tenant := Tenant, + data := #{<<"comment">> := Comment, + <<"requestor">> := Requestor}}, + #state{stmt_profile_del = ProfileDelStmt} = State) + when byte_size(Comment) =< ?MAX_COMMENT_LENGTH, + byte_size(Requestor) =< ?MAX_REQUESTOR_LENGTH -> + Resp = db_call({stmt_profile_del, ProfileDelStmt}, + [Tenant, Msisdn, Requestor, Comment]), + RespPid ! {reply, Resp}, + {noreply, State}; +%% topstopper +handle_cast(#{operation := {topstopper, get}, reply := RespPid, + msisdn := Msisdn, tenant := Tenant}, State) -> + Resp = db_call({stmt_ts_get, State#state.stmt_ts_get}, [Tenant, Msisdn]), + RespPid ! 
{reply, Resp}, + {noreply, State}; +handle_cast(#{operation := {topstopper, post}, reply := RespPid, + msisdn := Msisdn, tenant := Tenant, + data := #{<<"comment">> := Comment, + <<"requestor">> := Requestor}}, + #state{stmt_ts_post = TsPostStmt} = State) + when byte_size(Comment) =< ?MAX_COMMENT_LENGTH, + byte_size(Requestor) =< ?MAX_REQUESTOR_LENGTH -> + Resp = db_call({stmt_ts_post, TsPostStmt}, [Tenant, Msisdn, Requestor, + Comment]), + RespPid ! {reply, Resp}, + {noreply, State}; +handle_cast(#{operation := {topstopper, put}, reply := RespPid, + msisdn := Msisdn, tenant := Tenant, + data := #{<<"comment">> := Comment, + <<"requestor">> := Requestor, + <<"type">> := Type, + <<"limit">> := Limit}}, + #state{stmt_ts_put = TsPutStmt} = State) + when byte_size(Comment) =< ?MAX_COMMENT_LENGTH andalso + byte_size(Requestor) =< ?MAX_REQUESTOR_LENGTH andalso + (Type == <<"swisscom">> orelse Type == <<"customer">>) andalso + (Limit >= -1 andalso Limit =< 100000) -> + Resp = db_call({stmt_ts_put, TsPutStmt}, + [Tenant, Msisdn, Type, + dderloci_utils:oranumber_encode( + list_to_binary(io_lib:format("~p", [Limit]))), + Requestor, Comment]), + RespPid ! {reply, Resp}, + {noreply, State}; +handle_cast(#{operation := {topstopper, reset}, reply := RespPid, + msisdn := Msisdn, tenant := Tenant, + data := #{<<"comment">> := Comment, + <<"requestor">> := Requestor, + <<"type">> := Type}}, + #state{stmt_ts_reset = TsResetStmt} = State) + when byte_size(Comment) =< ?MAX_COMMENT_LENGTH andalso + byte_size(Requestor) =< ?MAX_REQUESTOR_LENGTH andalso + (Type == <<"swisscom">> orelse Type == <<"customer">>) -> + Resp = db_call({stmt_ts_reset, TsResetStmt}, [Tenant, Msisdn, Type, + Requestor, Comment]), + RespPid ! 
{reply, Resp}, + {noreply, State}; +handle_cast(#{operation := {topstopper, revert}, reply := RespPid, + msisdn := Msisdn, tenant := Tenant, + data := #{<<"comment">> := Comment, + <<"requestor">> := Requestor, + <<"type">> := Type}}, + #state{stmt_ts_revert = TsRevertStmt} = State) + when byte_size(Comment) =< ?MAX_COMMENT_LENGTH andalso + byte_size(Requestor) =< ?MAX_REQUESTOR_LENGTH andalso + (Type == <<"swisscom">> orelse Type == <<"customer">>) -> + Resp = db_call({stmt_ts_revert, TsRevertStmt}, [Tenant, Msisdn, Type, + Requestor, Comment]), + RespPid ! {reply, Resp}, + {noreply, State}; +%% barring +handle_cast(#{operation := {barring, get}, reply := RespPid, + msisdn := Msisdn, tenant := Tenant}, State) -> + Resp = db_call({stmt_bar_get, State#state.stmt_bar_get}, [Tenant, Msisdn]), + RespPid ! {reply, Resp}, + {noreply, State}; +handle_cast(#{operation := {barring, post}, reply := RespPid, + msisdn := Msisdn, tenant := Tenant, + data := #{<<"comment">> := Comment, + <<"requestor">> := Requestor}}, + #state{stmt_bar_post = BarPostStmt} = State) + when byte_size(Comment) =< ?MAX_COMMENT_LENGTH, + byte_size(Requestor) =< ?MAX_REQUESTOR_LENGTH -> + Resp = db_call({stmt_bar_post, BarPostStmt}, [Tenant, Msisdn, Comment, Requestor]), + RespPid ! {reply, Resp}, + {noreply, State}; +handle_cast(#{operation := {barring, put}, reply := RespPid, + msisdn := Msisdn, tenant := Tenant, + data := #{<<"type">> := Type, <<"barring">> := Barring, + <<"comment">> := Comment, <<"requestor">> := Requestor}}, + #state{stmt_bar_put = BarPutStmt} = State) + when byte_size(Comment) =< ?MAX_COMMENT_LENGTH andalso + byte_size(Requestor) =< ?MAX_REQUESTOR_LENGTH andalso + (Type == <<"swisscom">> orelse Type == <<"customer">>) andalso + (Barring == 0 orelse Barring == 6 orelse Barring == 9) -> + Resp = db_call({stmt_bar_put, BarPutStmt}, [Tenant, Msisdn, Type, Barring, Comment, Requestor]), + RespPid ! 
{reply, Resp}, + {noreply, State}; +%% +handle_cast(#{reply := RespPid}, State) -> + RespPid ! {reply, bad_req}, + {noreply, State}; +handle_cast(Request, State) -> + ?SWarn("Unsupported handle_cast ~p", [Request]), + {noreply, State}. + +-define(STMT_REBUILD(__SQL, __BINDS, __POOL, __STMT, __STATE), + (__STATE#state.__STMT):close(), + __STATE#state{__STMT = create_stmt(__POOL, __SQL, __BINDS)}). + +handle_info(update_dyn, #state{dynstate = Ds, name = SName} = State) -> + dperl_dal:update_service_dyn( + State#state.name, Ds, ?SERVICE_ACTIVE_THRESHOLD(SName), + ?SERVICE_OVERLOAD_THRESHOLD(SName)), + erlang:send_after(?SERVICE_UPDATE_PERIOD(SName), self(), update_dyn), + {noreply, State}; +handle_info(reset_dyn, #state{name = SName} = State) -> + NewDynState = (State#state.dynstate)#{req => 0, error => #{}}, + dperl_dal:update_service_dyn( + State#state.name, NewDynState, ?SERVICE_ACTIVE_THRESHOLD(SName), + ?SERVICE_OVERLOAD_THRESHOLD(SName)), + erlang:send_after(?SERVICE_STATUS_RESET_PERIOD(SName), self(), reset_dyn), + {noreply, State#state{dynstate = NewDynState}}; +handle_info({error, StmtType, Code, Message}, + #state{close_session = CloseSessionsErrors, listeners = Listeners, + close_statement = CloseStatementErrors, port = Port, + name = SName, pool = Pool} = State) -> + case lists:member(Code, CloseStatementErrors) of + true -> + ?SError("statement rebuild : ORA-~p ~s", [Code, Message]), + {noreply, + case StmtType of + stmt_profile_del -> + ?STMT_REBUILD(?PROFILE_DELETE_SQL, + ?PROFILE_DELETE_TOPSTOPPER_POST_BINDS, + Pool, stmt_profile_del, State); + stmt_ts_get -> + ?STMT_REBUILD(?TOPSTOPPER_GET_SQL, + ?TOPSTOPPER_BARRING_GET_BINDS, + Pool, stmt_ts_get, State); + stmt_ts_post -> + ?STMT_REBUILD(?TOPSTOPPER_POST_SQL, + ?PROFILE_DELETE_TOPSTOPPER_POST_BINDS, + Pool, stmt_ts_post, State); + stmt_ts_put -> + ?STMT_REBUILD(?TOPSTOPPER_PUT_SQL, + ?TOPSTOPPER_PUT_BINDS, + Pool, stmt_ts_put, State); + stmt_ts_reset -> + ?STMT_REBUILD(?TOPSTOPPER_RESET_SQL, + 
?TOPSTOPPER_RESET_REVERT_BINDS, + Pool, stmt_ts_reset, State); + stmt_ts_revert -> + ?STMT_REBUILD(?TOPSTOPPER_REVERT_SQL, + ?TOPSTOPPER_RESET_REVERT_BINDS, + Pool, stmt_ts_revert, State); + stmt_bar_get -> + ?STMT_REBUILD(?BARRING_GET_SQL, + ?TOPSTOPPER_BARRING_GET_BINDS, + Pool, stmt_bar_get, State); + stmt_bar_post -> + ?STMT_REBUILD(?BARRING_POST_SQL, + ?BARRING_POST_BINDS, + Pool, stmt_bar_post, State); + stmt_bar_put -> + ?STMT_REBUILD(?BARRING_PUT_SQL, + ?BARRING_PUT_BINDS, + Pool, stmt_bar_put, State) + end}; + _ -> + case lists:member(Code, CloseSessionsErrors) of + true -> + ?SError("pool restart ORA-~p ~s", [Code, Message]), + lists:map( + fun(Ip) -> + case catch ranch:set_max_connections( + {Ip, Port}, 0) of + ok -> ok; + Error -> + ?SError("stop accept ~p on port ~p : ~p", + [Ip, Port, Error]) + end + end, Listeners), + erlocipool:del(Pool), + case init_resources(State#state.resource, State) of + {ok, State1} -> + lists:map( + fun(Ip) -> + case catch ranch:set_max_connections( + {Ip, Port}, ?SERVICE_MAXCONNS(SName)) of + ok -> ok; + Error -> + ?SError("start accept ~p on port ~p : ~p", + [Ip, Port, Error]) + end + end, Listeners), + {noreply, State1}; + Error -> {stop, {resource, Error}, State} + end; + _ -> + ?SError("Unhandled ~p : ~s", [Code, Message]), + {noreply, State} + end + end; +handle_info(count_request, #state{dynstate = Ds} = State) -> + NewDs = Ds#{req => maps:get(req, Ds, 0) + 1}, + {noreply, State#state{dynstate = NewDs}}; +handle_info({count_error, HttpRInt}, + #state{dynstate = #{error := Error} = Ds} = State) -> + NewDs = Ds#{error => Error#{HttpRInt => maps:get(HttpRInt, Error, 0) + 1}}, + {noreply, State#state{dynstate = NewDs}}; +handle_info(stop, State) -> + {stop, normal, State}; +handle_info(Request, State) -> + ?SWarn("Unsupported handle_info ~p", [Request]), + {noreply, State}. 
+ +terminate(Reason, #state{listeners = Listeners, port = Port, pool = Pool}) -> + erlocipool:del(Pool), + stop_listeners(Reason, Listeners, Port), + ?SInfo("terminate ~p", [Reason]). + +code_change(OldVsn, State, Extra) -> + ?SInfo("code_change ~p: ~p", [OldVsn, Extra]), + {ok, State}. + +format_status(Opt, [PDict, State]) -> + ?SInfo("format_status ~p: ~p", [Opt, PDict]), + State. + +db_call({StmtType, Stmt}, Params) -> + self() ! count_request, + case Stmt:exec_stmt([list_to_tuple([?RESP_BUFFER|Params])]) of + {executed, _, [{_,<<"{\"errorCode\":",_:8,HttpR:3/binary,_/binary>>=Resp}]} -> + HttpRInt = binary_to_integer(HttpR), + if HttpRInt >= 400 andalso HttpRInt < 500 -> + ?SInfo("~p: ~s", [HttpRInt, Resp]); + HttpRInt >= 500 -> + ?SWarn("~p: ~s", [HttpRInt, Resp]) + end, + self() ! {count_error, HttpRInt}, + {HttpRInt, Resp}; + {executed, _, [{_,Resp}]} -> {200, Resp}; + {error, {Code, Message}} -> + self() ! {error, StmtType, Code, Message}, + {500, + #{errorCode => 2500, + errorMessage => <<"Internal Server Error">>, + errorDetails => Message}}; + {error, Reason} -> + ?SError("~p (~p) : ~p", [StmtType, Params, Reason]), + self() ! stop, + {500, + #{errorCode => 2500, + errorMessage => <<"Internal Server Error">>, + errorDetails => <<"See server error logs for details">>}} + end. 
+
+%% Bring up the HTTPS (cowboy) REST interface on every configured listener
+%% address that is local to this host.  Builds the routing table (swagger
+%% assets, load-balancer probe URL, spec, and the /:class/:msisdn service
+%% routes), then starts one TLS listener per IP and records listeners/port in
+%% the server state.  If a listener is already running ({already_started,_})
+%% all listeners are stopped and the whole setup is retried.
+init_interface(#{baseUrl := BaseUrl, commonWhitelist := CWhiteList,
                 listenerAddresses := LAddresses, tenants := Tenants,
                 port := Port, ssl := #{cert := Cert, key := Key}} = Intf,
               #state{name = SName} = State) ->
    MaxAcceptors = maps:get(max_acceptors, Intf, ?SERVICE_MAXACCEPTORS(SName)),
    MaxConnections = maps:get(max_connections, Intf, ?SERVICE_MAXCONNS(SName)),
    Opts = #{resource => self(), whitelist => maps:keys(CWhiteList),
             tenants => Tenants, name => State#state.name},
    %% bind only addresses that actually exist on this host
    FilteredListenerIps = local_ips(LAddresses),
    %% normalize the base url so it always starts with "/"
    Base =
        case hd(BaseUrl) of
            $/ -> BaseUrl;
            _ -> "/" ++ BaseUrl
        end,
    try
        lists:map(fun(Ip) ->
            Dispatch =
                cowboy_router:compile(
                  [{'_',
                    [{Base++"/swagger/", ?MODULE, {swagger, Base, SName}},
                     {Base++"/swagger/brand.json", cowboy_static, {priv_file, dperl, "brand.json"}},
                     {Base++"/swagger/swisscom.png", cowboy_static, {priv_file, dperl, "swisscom.png"}},
                     {Base++"/swagger/[...]", cowboy_static, {swagger_static, SName}},
                     {Base++"/" ++ ?SERVICE_PROBE_URL(SName), ?MODULE, {'$probe', SName}},
                     {Base, ?MODULE, {spec, SName}},
                     {Base++"/:class/:msisdn", [{class, fun class_constraint/2}], ?MODULE, Opts},
                     {Base++"/:msisdn", ?MODULE, Opts}
                    ]}]
                 ),
            TransOpts = [{ip, Ip}, {port, Port},
                         {num_acceptors, MaxAcceptors},
                         {max_connections, MaxConnections},
                         {versions, ['tlsv1.2','tlsv1.1',tlsv1]}
                         | imem_server:get_cert_key(Cert)
                         ++ imem_server:get_cert_key(Key)],
            %% ?MODULE sits between router and handler to enforce the swagger
            %% whitelist (see execute/2)
            ProtoOpts = #{env => #{dispatch => Dispatch},
                          middlewares => [cowboy_router, ?MODULE, cowboy_handler],
                          stream_handlers => [cowboy_compress_h, cowboy_stream_h]},
            {ok, P} = cowboy:start_tls({Ip, Port}, TransOpts, ProtoOpts),
            ?SInfo("[~p] Activated https://~s:~p~s",
                   [P, inet:ntoa(Ip), Port, Base])
        end, FilteredListenerIps),
        {ok, State#state{listeners = FilteredListenerIps, port = Port}}
    catch
        error : {badmatch,{error,{already_started,_}}} = Error ->
            ?SError("error:~p~n~p", [Error, erlang:get_stacktrace()]),
            stop_listeners(Error, FilteredListenerIps, Port),
            init_interface(Intf, State);
        Class:Error ->
            ?SError("~p:~p~n~p", [Class, Error, erlang:get_stacktrace()]),
            {error, Error}
    end.

%% Cowboy router constraint for the :class binding; only "topstopper" and
%% "barring" are valid classes.
class_constraint(format_error, Value) -> io_lib:format("The class ~p is not valid.", [Value]);
class_constraint(_Type, <<"topstopper">>) -> {ok, topstopper};
class_constraint(_Type, <<"barring">>) -> {ok, barring};
class_constraint(_Type, _) -> {error, not_valid}.

%% Stop the cowboy listener on every given IP; failures are logged but ignored.
stop_listeners(Reason, Listeners, Port) ->
    lists:map(
      fun(Ip) ->
              case catch cowboy:stop_listener({Ip, Port}) of
                  ok -> ok;
                  Error ->
                      ?SError("[~p] stopping listener ~p on port ~p : ~p",
                              [Reason, Ip, Port, Error])
              end
      end, Listeners).

%% Intersect the configured listener addresses with the IPv4 addresses present
%% on this node; keys that are not 4-tuples (e.g. hostnames, IPv6) are ignored.
local_ips(ListenerAddresses) ->
    IntfIps = dderl:local_ipv4s(),
    maps:fold(
      fun({_, _, _, _} = Ip, _, Acc) ->
              case lists:member(Ip, IntfIps) of
                  true -> [Ip | Acc];
                  false -> Acc
              end;
         (_, _, Acc) -> Acc
      end, [], ListenerAddresses
     ).

%% Placeholder: the erlocipool-based resource setup was removed and is not yet
%% reimplemented (see the commented-out legacy code below); always fails for now.
init_resources(#{credential := #{user := User, password := Password},
                 links := Links} = Resources,
               #state{active_link = ActiveLink} = State) ->
    #{opt := Opts, tns := TNS} = lists:nth(ActiveLink, Links),
    {error, unimplemented}.
+ %% TODO : reimplement without erlocipool + %Options = dperl_dal:oci_opts(?ERLOCIPOOL_LOG_CB, Opts), + %Pool = dperl_dal:get_pool_name(Resources, State#state.name), + %case proplists:get_value(sess_min, Options) of + % 0 -> {error, invalid_dbconn_pool_size}; + % _ -> + % case erlocipool:new(Pool, TNS, User, Password, Options) of + % {ok, _PoolPid} -> + % try + % CloseStatementErrors = maps:get(close_statement, + % Resources, []), + % CloseSessionsErrors = maps:get(close_session, + % Resources, []), + % if is_list(CloseStatementErrors) andalso + % is_list(CloseSessionsErrors) -> + % {ok, + % State#state{ + % stmt_profile_del + % = create_stmt(Pool, ?PROFILE_DELETE_SQL, + % ?PROFILE_DELETE_TOPSTOPPER_POST_BINDS), + % stmt_ts_get + % = create_stmt(Pool, ?TOPSTOPPER_GET_SQL, + % ?TOPSTOPPER_BARRING_GET_BINDS), + % stmt_ts_post + % = create_stmt(Pool, ?TOPSTOPPER_POST_SQL, + % ?PROFILE_DELETE_TOPSTOPPER_POST_BINDS), + % stmt_ts_put + % = create_stmt(Pool, ?TOPSTOPPER_PUT_SQL, + % ?TOPSTOPPER_PUT_BINDS), + % stmt_ts_reset + % = create_stmt(Pool, ?TOPSTOPPER_RESET_SQL, + % ?TOPSTOPPER_RESET_REVERT_BINDS), + % stmt_ts_revert + % = create_stmt(Pool, ?TOPSTOPPER_REVERT_SQL, + % ?TOPSTOPPER_RESET_REVERT_BINDS), + % stmt_bar_get + % = create_stmt(Pool, ?BARRING_GET_SQL, + % ?TOPSTOPPER_BARRING_GET_BINDS), + % stmt_bar_post + % = create_stmt(Pool, ?BARRING_POST_SQL, + % ?BARRING_POST_BINDS), + % stmt_bar_put + % = create_stmt(Pool, ?BARRING_PUT_SQL, + % ?BARRING_PUT_BINDS), + % pool = Pool, close_statement = CloseStatementErrors, + % close_session = CloseSessionsErrors}}; + % true -> + % {badarg, [close_statement, close_session]} + % end + % catch + % Class:Error -> + % ?SError("create statements failed : ~p. 
Deleing pool.", [{Class, Error}]), + % erlocipool:del(Pool), + % {Class, Error} + % end; + % {error, {already_started,_} = Error} -> + % ?SError("Pool ~p exists, restarting...", [Pool]), + % erlocipool:del(Pool), + % {error, Error}; + % {error, Error} -> + % erlocipool:del(Pool), + % init_resources( + % Resources, + % State#state{active_link = + % if length(Links) > ActiveLink -> + % ?SWarn("Pool ~p start : ", + % [Pool, Error]), + % ActiveLink + 1; + % true -> + % ?SError("Pool ~p start : ", + % [Pool, Error]), + % 1 + % end}) + % end + %end. + +-spec create_stmt(atom(), binary(), list()) -> tuple(). +create_stmt(Pool, Sql, Binds) -> + {error, unimplemented}. + %% TODO + % ?OciStmt(Pool, Sql, Binds, Stmt), + % Stmt. + +%% +%% Cowboy REST resource +%% + +-define(SERVER, "dperl AAA"). +-define(SPEC_FILE, "aaa.json"). +-include_lib("dderl/src/dderl_rest.hrl"). + +-define(E2400, + #{errorCode => 2400, + errorMessage => <<"Missing body">>, + errorDetails => <<"Missing request payload">>}). +-define(E1404, + #{errorCode => 1404, + errorMessage => <<"Not Found">>, + errorDetails => <<"Ressource not found, no AAA-Profile exists." + " Consider creating a default profile with" + " POST">>}). +-define(E1405, + #{errorCode => 1405, + errorMessage => <<"Method Not Allowed">>, + errorDetails => <<"HTTP method isn't allowed on this resource">>}). + +-define(E1403, + #{errorCode => 1403, + errorMessage => <<"Forbidden">>, + errorDetails => <<"No access to the requested service">>}). + +-define(JSON(__BODY), imem_json:encode(__BODY)). + +% applying Swagger whitelist through middleware +execute(Req, Env) -> + case maps:get(handler_opts, Env, none) of + {swagger_static, SName} -> + apply_swagger_whitelist( + Req, SName, Env#{handler_opts => {priv_dir, dderl, "public/swagger"}}); + {swagger, _, SName} -> apply_swagger_whitelist(Req, SName, Env); + {spec, SName} -> apply_swagger_whitelist(Req, SName, Env); + _ -> {ok, Req, Env} + end. 
+
+%% Allow swagger asset/spec requests only from IPs in the swagger whitelist;
+%% an empty whitelist means "allow everyone".
+apply_swagger_whitelist(Req, SName, Env) ->
    {Ip, _Port} = cowboy_req:peer(Req),
    case ?SWAGGER_WHITELIST(SName) of
        #{Ip := _} -> {ok, Req, Env};
        WL when map_size(WL) == 0 -> {ok, Req, Env};
        _ ->
            Req1 = cowboy_req:reply(403, ?REPLY_JSON_HEADERS,
                                    ?JSON(?E1403), Req),
            {stop, Req1}
    end.

%% Load-balancer probe endpoint: answer with the configured probe response.
init(Req, {'$probe', SName}) ->
    {Code, Resp} = ?SERVICE_PROBE_RESP(SName),
    Req1 = cowboy_req:reply(Code, ?REPLY_HEADERS, Resp, Req),
    {ok, Req1, undefined};
%% Swagger UI entry point: redirect to index.html, appending "/" when missing.
init(Req, {swagger, Base, _SName}) ->
    Url = iolist_to_binary(cowboy_req:uri(Req)),
    LastAt = byte_size(Url) - 1,
    Req1 =
        cowboy_req:reply(
          302, #{<<"cache-control">> => <<"no-cache">>,
                 <<"pragma">> => <<"no-cache">>,
                 <<"location">> =>
                     list_to_binary(
                       [Url, case Url of
                                 <<_:LastAt/binary, "/">> -> "";
                                 _ -> "/"
                             end,
                        "index.html?url="++ Base])},
          <<"Redirecting...">>, Req),
    {ok, Req1, #state{}};
%% Serve the OpenAPI spec file; support CORS preflight via OPTIONS.
init(Req, {spec, _SName}) ->
    Req1 =
        case cowboy_req:method(Req) of
            <<"GET">> ->
                {ok, Content} = file:read_file(
                                  filename:join(dderl:priv_dir(dperl),
                                                ?SPEC_FILE)),
                cowboy_req:reply(200, ?REPLY_JSON_SPEC_HEADERS, Content, Req);
            <<"OPTIONS">> ->
                ACRHS = cowboy_req:header(<<"access-control-request-headers">>, Req),
                cowboy_req:reply(200,
                    maps:merge(#{<<"allow">> => <<"GET,OPTIONS">>,
                                 <<"access-control-allow-headers">> => ACRHS},
                               ?REPLY_OPT_HEADERS), <<>>, Req);
            Method->
                ?Error("~p not supported", [Method]),
                cowboy_req:reply(405, ?REPLY_JSON_HEADERS, ?JSON(?E1405), Req)
        end,
    {ok, Req1, #state{}};
%% Main service entry: basic-auth tenant check, IP whitelist check, operation
%% permission check, then asynchronous dispatch to the resource gen_server.
init(Req0, #{whitelist := CWhiteList, tenants := Tenants,
             name := ServiceName} = Opts)->
    put(name, ServiceName),
    %% req_time is used by info/3 to report total request latency
    Req = Req0#{req_time => os:timestamp()},
    {Ip, _Port} = cowboy_req:peer(Req),
    IpStr = inet:ntoa(Ip),
    case cowboy_req:parse_header(<<"authorization">>, Req) of
        {basic, Tenant, Password} ->
            case Tenants of % user:password authorization check
                #{Tenant := #{password := Password, permissions := Permissions,
                              whitelist := TWhiteLists}} ->
                    % whitelists check
                    case {is_ip_allowed(Ip, maps:keys(TWhiteLists)),
                          is_ip_allowed(Ip, CWhiteList)} of
                        {false, false} ->
                            ?SError("~s (~s) is not in tenants' whitelist",
                                    [IpStr, Tenant]),
                            unauthorized(Req);
                        _ ->
                            Class = cowboy_req:binding(class, Req, <<>>),
                            Msisdn = cowboy_req:binding(msisdn, Req, <<>>),
                            Op = cowboy_req:method(Req),
                            Operation = get_operation(Class, Op, Req),
                            % operation permission check
                            case Permissions of
                                #{Operation := _} ->
                                    push_request(Operation, Msisdn, Tenant, Req, Opts);
                                _ ->
                                    ?SError("~s (~s) operation ~p not authorized",
                                            [IpStr, Tenant, Operation]),
                                    unauthorized(Req)
                            end
                    end;
                Tenants ->
                    %% NOTE(review): this log line writes the supplied password
                    %% in cleartext to the error log — consider redacting.
                    ?SError("~s (~s:~s) is not configured in tenants",
                            [IpStr, Tenant, Password]),
                    unauthorized(Req)
            end;
        Auth ->
            ?SError("~s provided unsupported or bad authorization ~p", [IpStr, Auth]),
            unauthorized(Req)
    end.

%% An empty whitelist allows every IP; otherwise exact membership is required.
-spec is_ip_allowed(tuple(), list()) -> true | false.
is_ip_allowed(_Ip, []) -> true;
is_ip_allowed(Ip, WhiteList) -> lists:member(Ip, WhiteList).

%% Reply 403 Forbidden and terminate the request.
unauthorized(Req) ->
    Req1 = cowboy_req:reply(403, ?REPLY_JSON_HEADERS, ?JSON(?E1403), Req),
    {ok, Req1, undefined}.

%% Map the :class route binding plus HTTP method (plus ?action= for PATCH) to
%% the internal {Class, Op} operation tuple used for permission checks.
get_operation(<<>>, <<"DELETE">>, _Req) -> {profile, delete};
get_operation(barring, <<"GET">>, _Req) -> {barring, get};
get_operation(barring, <<"POST">>, _Req) -> {barring, post};
get_operation(barring, <<"PUT">>, _Req) -> {barring, put};
get_operation(topstopper, <<"GET">>, _Req) -> {topstopper, get};
get_operation(topstopper, <<"PATCH">>, Req) ->
    case cowboy_req:match_qs([{action, [], none}], Req) of
        #{action := <<"reset">>} -> {topstopper, reset};
        #{action := <<"revert">>} -> {topstopper, revert};
        #{action := Other} -> {topstopper, Other}
    end;
get_operation(topstopper, <<"POST">>, _Req) -> {topstopper, post};
get_operation(topstopper, <<"PUT">>, _Req) -> {topstopper, put};
get_operation(Class, Op, _) -> {Class, Op}.
+
+%% Forward an authorized operation to the resource gen_server as a cast and
+%% switch cowboy into loop mode; the reply arrives via info/3.  Mutating
+%% operations (put/post/patch/delete) require a JSON body (delete carries
+%% requestor/remark data); GET must not have one.
+push_request({_, Op} = Operation, Msisdn, Tenant, Req0, #{resource := Service} = Opts) ->
    CastReq = #{reply => self(), operation => Operation,
                msisdn => Msisdn, tenant => Tenant},
    %% db_call timestamp lets info/3 split latency into request vs DB time
    Req = Req0#{db_call => os:timestamp()},
    case cowboy_req:has_body(Req) of
        %% NOTE(review): Op here comes from get_operation/3 — for PATCH it is
        %% reset | revert, never the atom patch, so the `Op == patch` guard
        %% below can never match; body-less PATCH falls into the final clause.
        false when Op == put; Op == post; Op == patch; Op == delete ->
            Req1 = cowboy_req:reply(400, ?REPLY_JSON_HEADERS, ?JSON(?E2400), Req),
            {ok, Req1, undefined};
        false when Op == get ->
            ok = gen_server:cast(Service, CastReq),
            {cowboy_loop, Req, Opts, hibernate};
        true ->
            {ok, Body, Req1} = cowboy_req:read_body(Req),
            case catch imem_json:decode(Body, [return_maps]) of
                {'EXIT', Error} ->
                    ?SError("~p malformed with ~s : ~p", [Operation, Body, Error]),
                    Req2 = cowboy_req:reply(400, ?REPLY_JSON_HEADERS, ?JSON(?E2400), Req1),
                    {ok, Req2, undefined};
                BodyMap ->
                    ok = gen_server:cast(Service, CastReq#{data => BodyMap}),
                    {cowboy_loop, Req1, Opts, hibernate}
            end;
        %% body present/absent in a combination not handled above
        HasBody ->
            ?SError("~p with body ~p is not supported", [Operation, HasBody]),
            Req1 = cowboy_req:reply(400, ?REPLY_JSON_HEADERS, ?JSON(?E2400), Req),
            {ok, Req1, undefined}
    end.
+
+%% cowboy loop-mode callback: receive the {reply, ...} message sent by the
+%% resource gen_server, normalize it to {Code, binary Body}, reply to the
+%% client and log total / DB latency (derived from req_time and db_call
+%% timestamps placed on the request map by init/2 and push_request/5).
+info({reply, bad_req}, Req, State) -> info({reply, {400, <<>>}}, Req, State);
+info({reply, not_found}, Req, State) ->
    info({reply, {404, ?JSON(?E1404)}}, Req, State);
info({reply, {Code, Body}}, Req, State) when is_integer(Code), is_map(Body) ->
    info({reply, {Code, imem_json:encode(Body)}}, Req, State);
info({reply, {Code, Body}}, Req, State) when is_integer(Code), is_binary(Body) ->
    Req1 = cowboy_req:reply(Code, ?REPLY_JSON_HEADERS, Body, Req),
    ReqTime = maps:get(req_time, Req1),
    DbCall = maps:get(db_call, Req1),
    Now = os:timestamp(),
    Total = timer:now_diff(Now, ReqTime),
    if Total > 0 ->
        Op = cowboy_req:method(Req),
        Class = cowboy_req:binding(class, Req, <<>>),
        Msisdn = cowboy_req:binding(msisdn, Req, <<>>),
        {Ip, Port} = cowboy_req:peer(Req),
        IpStr = inet:ntoa(Ip),
        if is_tuple(DbCall) ->
            ?SDebug("~s:~p ~s ~p ~s : TotalTime (micros) ~p = ~p (ReqTime) + ~p (DBTime)",
                    [IpStr, Port, Op, Class, Msisdn, Total,
                     timer:now_diff(DbCall, ReqTime),
                     timer:now_diff(Now, DbCall)]);
           true ->
            ?SDebug("~s:~p ~s ~p ~s : TotalTime ~p micros",
                    [IpStr, Port, Op, Class, Msisdn, Total])
        end;
       true -> ok
    end,
    {stop, Req1, State}.
diff --git a/src/dperl_ora.hrl b/src/dperl_ora.hrl
new file mode 100644
index 00000000..435bb385
--- /dev/null
+++ b/src/dperl_ora.hrl
@@ -0,0 +1,215 @@
+-ifndef(_dperl_ora_).
+-define(_dperl_ora_, true).
+
+-include("dperl.hrl").
+
+%% limits validated on incoming payloads
+-define(MAX_COMMENT_LENGTH, 200).
+-define(MAX_REQUESTOR_LENGTH, 20).
+
+%% pre-allocated 1 KB zero buffer used as the OCI out-bind result buffer
+-define(RESP_BUFFER, list_to_binary(lists:duplicate(1024, 0))).
+
+-define(SERVICE_MAXACCEPTORS(__SERVICE_NAME),
        ?GET_CONFIG(maxNumberOfAcceptors, [__SERVICE_NAME], 100,
                    "Maximum number of TCP acceptors")
       ).

-define(SERVICE_MAXCONNS(__SERVICE_NAME),
        ?GET_CONFIG(maxNumberOfSockets, [__SERVICE_NAME], 5000,
                    "Maximum number of simulteneous connections")
       ).
+ +-define(SERVICE_PROBE_URL(__SERVICE_NAME), + ?GET_CONFIG(probeUrl, [__SERVICE_NAME], "/probe.html", + "Defines the url of the probe for the load balancer") + ). + +-define(SERVICE_PROBE_RESP(__SERVICE_NAME), + ?GET_CONFIG(probeResp, [__SERVICE_NAME], + {200, + <<"" + "Service is alive" + "">>}, + "Response given to the load balancer when the probeUrl" + " is requested") + ). + +-define(SERVICE_UPDATE_PERIOD(__SERVICE_NAME), + ?GET_CONFIG(serviceDynUpdatePeriod, [__SERVICE_NAME], 2000, + "Delay in millisecond between a service updates its DYN" + " info") + ). + +-define(SERVICE_STATUS_RESET_PERIOD(__SERVICE_NAME), + ?GET_CONFIG(serviceDynResetPeriod, [__SERVICE_NAME], 3600 * 1000, + "Delay in millisecond between a service resets its DYN" + " info") + ). + +-define(SERVICE_ACTIVE_THRESHOLD(__SERVICE_NAME), + ?GET_CONFIG(serviceStatusActiveThreshold, [__SERVICE_NAME], 1, + "Request count threshhold beyond which service is marked" + " as active in DYN") + ). + +-define(SERVICE_OVERLOAD_THRESHOLD(__SERVICE_NAME), + ?GET_CONFIG(serviceStatusOverloadThreshold, [__SERVICE_NAME], 100, + "Request count threshhold beyond which service is marked" + " as overloaded in DYN") + ). + +-define(SWAGGER_WHITELIST(__SERVICE_NAME), + ?GET_CONFIG(swaggerWhitelist, [__SERVICE_NAME], #{}, + "Whitelist for SwaggerClient Access")). + +%% sqls + +%% common sqls and binds +-define(PROFILE_DELETE_SQL, <<" +BEGIN + :SQLT_CHR_OUT_RESULT := pkg_ora_dperl.aaa_profile_msisdn_delete( + ProvisioningTenant=>:SQLT_CHR_APP, + Msisdn=>:SQLT_CHR_MSISDN, + Requestor=>:SQLT_CHR_REQUESTOR, + Remark=>:SQLT_CHR_REMARK + ); +END; +">>). + +%% topstopper sqls and binds +-define(TOPSTOPPER_GET_SQL, <<" +BEGIN + :SQLT_CHR_OUT_RESULT := pkg_ora_dperl.aaa_ts_msisdn_get( + ProvisioningTenant=>:SQLT_CHR_APP, + Msisdn=>:SQLT_CHR_MSISDN); +END; +">>). 
+ +-define(TOPSTOPPER_POST_SQL, <<" +BEGIN + :SQLT_CHR_OUT_RESULT := pkg_ora_dperl.aaa_ts_msisdn_post( + ProvisioningTenant=>:SQLT_CHR_APP, + Msisdn=>:SQLT_CHR_MSISDN, + Requestor=>:SQLT_CHR_REQUESTOR, + Remark=>:SQLT_CHR_REMARK + ); +END; +">>). + +-define(PROFILE_DELETE_TOPSTOPPER_POST_BINDS, + [{<<":SQLT_CHR_OUT_RESULT">>, out, 'SQLT_CHR'}, + {<<":SQLT_CHR_APP">>, in, 'SQLT_CHR'}, + {<<":SQLT_CHR_MSISDN">>, in, 'SQLT_CHR'}, + {<<":SQLT_CHR_REQUESTOR">>, in, 'SQLT_CHR'}, + {<<":SQLT_CHR_REMARK">>, in, 'SQLT_CHR'}]). + +-define(TOPSTOPPER_PUT_SQL, <<" +BEGIN + :SQLT_CHR_OUT_RESULT := pkg_ora_dperl.aaa_ts_msisdn_put( + ProvisioningTenant=>:SQLT_CHR_APP, + Msisdn=>:SQLT_CHR_MSISDN, + BarringType=>:SQLT_CHR_BARRINGTYPE, + TopStopLimit=>:SQLT_VNU_TOPSTOPLIMIT, + Requestor=>:SQLT_CHR_REQUESTOR, + Remark=>:SQLT_CHR_REMARK + ); +END; +">>). + +-define(TOPSTOPPER_PUT_BINDS, + [{<<":SQLT_CHR_OUT_RESULT">>, out, 'SQLT_CHR'}, + {<<":SQLT_CHR_APP">>, in, 'SQLT_CHR'}, + {<<":SQLT_CHR_MSISDN">>, in, 'SQLT_CHR'}, + {<<":SQLT_CHR_BARRINGTYPE">>, in, 'SQLT_CHR'}, + {<<":SQLT_VNU_TOPSTOPLIMIT">>, in, 'SQLT_VNU'}, + {<<":SQLT_CHR_REQUESTOR">>, in, 'SQLT_CHR'}, + {<<":SQLT_CHR_REMARK">>, in, 'SQLT_CHR'}]). + +-define(TOPSTOPPER_RESET_SQL, <<" +BEGIN + :SQLT_CHR_OUT_RESULT := pkg_ora_dperl.aaa_ts_msisdn_reset( + ProvisioningTenant=>:SQLT_CHR_APP, + Msisdn=>:SQLT_CHR_MSISDN, + BarringType=>:SQLT_CHR_BARRINGTYPE, + Requestor=>:SQLT_CHR_REQUESTOR, + Remark=>:SQLT_CHR_REMARK + ); +END; +">>). + + +-define(TOPSTOPPER_REVERT_SQL, <<" +BEGIN + :SQLT_CHR_OUT_RESULT := pkg_ora_dperl.aaa_ts_msisdn_revert( + ProvisioningTenant=>:SQLT_CHR_APP, + Msisdn=>:SQLT_CHR_MSISDN, + BarringType=>:SQLT_CHR_BARRINGTYPE, + Requestor=>:SQLT_CHR_REQUESTOR, + Remark=>:SQLT_CHR_REMARK + ); +END; +">>). 
+ +-define(TOPSTOPPER_RESET_REVERT_BINDS, + [{<<":SQLT_CHR_OUT_RESULT">>, out, 'SQLT_CHR'}, + {<<":SQLT_CHR_APP">>, in, 'SQLT_CHR'}, + {<<":SQLT_CHR_MSISDN">>, in, 'SQLT_CHR'}, + {<<":SQLT_CHR_BARRINGTYPE">>, in, 'SQLT_CHR'}, + {<<":SQLT_CHR_REQUESTOR">>, in, 'SQLT_CHR'}, + {<<":SQLT_CHR_REMARK">>, in, 'SQLT_CHR'}]). + +%% barring sqls and binds +-define(BARRING_GET_SQL, <<" +BEGIN + :SQLT_CHR_OUT_RESULT := pkg_ora_dperl.aaa_barring_msisdn_get( + ProvisioningTenant=>:SQLT_CHR_APP, + Msisdn=>:SQLT_CHR_MSISDN + ); +END; +">>). + +-define(TOPSTOPPER_BARRING_GET_BINDS, + [{<<":SQLT_CHR_OUT_RESULT">>, out, 'SQLT_CHR'}, + {<<":SQLT_CHR_APP">>, in, 'SQLT_CHR'}, + {<<":SQLT_CHR_MSISDN">>, in, 'SQLT_CHR'}]). + +-define(BARRING_POST_SQL, <<" +BEGIN + :SQLT_CHR_OUT_RESULT := pkg_ora_dperl.aaa_barring_msisdn_post( + ProvisioningTenant=>:SQLT_CHR_APP, + Msisdn=>:SQLT_CHR_MSISDN, + Requestor=>:SQLT_CHR_REQUESTOR, + Remark=>:SQLT_CHR_REMARK + ); +END; +">>). + +-define(BARRING_POST_BINDS, + [{<<":SQLT_CHR_OUT_RESULT">>, out, 'SQLT_CHR'}, + {<<":SQLT_CHR_APP">>, in, 'SQLT_CHR'}, + {<<":SQLT_CHR_MSISDN">>, in, 'SQLT_CHR'}, + {<<":SQLT_CHR_REMARK">>, in, 'SQLT_CHR'}, + {<<":SQLT_CHR_REQUESTOR">>, in, 'SQLT_CHR'}]). + +-define(BARRING_PUT_SQL, <<" +BEGIN + :SQLT_CHR_OUT_RESULT := pkg_ora_dperl.aaa_barring_msisdn_put( + ProvisioningTenant=>:SQLT_CHR_APP, + Msisdn=>:SQLT_CHR_MSISDN, + BarringType=>:SQLT_CHR_BARRINGTYPE, + Requestor=>:SQLT_CHR_REQUESTOR, + BarringLevel=>:SQLT_INT_LEVEL, + Remark=>:SQLT_CHR_REMARK + ); +END; +">>). + +-define(BARRING_PUT_BINDS, + [{<<":SQLT_CHR_OUT_RESULT">>, out, 'SQLT_CHR'}, + {<<":SQLT_CHR_APP">>, in, 'SQLT_CHR'}, + {<<":SQLT_CHR_MSISDN">>, in, 'SQLT_CHR'}, + {<<":SQLT_CHR_BARRINGTYPE">>, in, 'SQLT_CHR'}, + {<<":SQLT_INT_LEVEL">>, in, 'SQLT_INT'}, + {<<":SQLT_CHR_REMARK">>, in, 'SQLT_CHR'}, + {<<":SQLT_CHR_REQUESTOR">>, in, 'SQLT_CHR'}]). + +-endif. 
%_dperl_ora_
diff --git a/src/dperl_skvh_copy.erl b/src/dperl_skvh_copy.erl
new file mode 100644
index 00000000..e8d1d839
--- /dev/null
+++ b/src/dperl_skvh_copy.erl
@@ -0,0 +1,255 @@
+%% Worker that copies an SKVH channel (bucket) between imem instances, either
+%% of which may be the local node, using the dperl_strategy_scr sync strategy.
+-module(dperl_skvh_copy).
+
+-include("dperl.hrl").
+
+-behavior(dperl_worker).
+-behavior(dperl_strategy_scr).
+
+% dperl_job exports
+-export([init/1, terminate/2, handle_call/3, handle_cast/2, handle_info/2,
         code_change/3, format_status/2, get_status/1, init_state/1]).

%% src*/dst* fields hold the imem session, failover link index and credentials
%% for each side; is[S|D]Local short-circuits the remote session when the
%% bucket lives on this node; audit_start_time is the sync resume point.
-record(state, {name, srcImemSess, srcActLink = 1, srcCred, srcLinks, sbucket,
                maxKey, minKey, dstImemSess, dstActLink = 1, dstCred, dstLinks,
                dbucket, dbucketOpts = [], isDLocal = false, isSLocal = false,
                first_sync = true, rows = [], audit_start_time = {0,0}}).

% dperl_strategy_scr export
-export([connect_check_src/1, get_source_events/2, connect_check_dst/1,
         do_cleanup/5, do_refresh/2, load_src_after_key/3, load_dst_after_key/3,
         fetch_src/2, fetch_dst/2, delete_dst/2, insert_dst/3, update_dst/3,
         report_status/3]).

%% Ensure a usable imem session to the source side; a local source needs none.
%% Reuses the old session when still valid, otherwise reconnects (possibly
%% advancing to the next configured link on failure).
connect_check_src(#state{isSLocal = true} = State) -> {ok, State};
connect_check_src(#state{srcActLink = ActiveLink, srcLinks = Links,
                         srcCred = #{user := User, password := Password},
                         srcImemSess = OldSession} = State) ->
    case connect_imem(ActiveLink, Links, User, Password, OldSession) of
        {ok, OldSession} -> {ok, State};
        {ok, Session} -> {ok, State#state{srcImemSess = Session}};
        {error, Error, NewActiveLink} ->
            {error, Error, State#state{srcActLink = NewActiveLink}}
    end.
+
+%% Ensure a usable imem session to the destination side and make sure the
+%% destination SKVH channel exists (created with dbucketOpts when missing).
+connect_check_dst(#state{isDLocal = true, dbucketOpts = DBucketOpts,
                         dbucket = DstBucket} = State) ->
    check_channel(undefined, true, DstBucket, DBucketOpts),
    {ok, State};
connect_check_dst(#state{dstActLink = ActiveLink, dstLinks = Links,
                         dstCred = #{user := User, password := Password},
                         dstImemSess = OldSession, dbucketOpts = DBucketOpts,
                         dbucket = DstBucket, isDLocal = IsDLocal} = State) ->
    case connect_imem(ActiveLink, Links, User, Password, OldSession) of
        {ok, OldSession} -> {ok, State};
        {ok, Session} ->
            check_channel(Session, IsDLocal, DstBucket, DBucketOpts),
            {ok, State#state{dstImemSess = Session}};
        {error, Error, NewActiveLink} ->
            {error, Error, State#state{dstActLink = NewActiveLink}}
    end.

%% Produce the next batch of changed keys for the sync strategy.  Keys queued
%% by do_cleanup/5 are drained first; otherwise the source audit trail is read
%% from the last seen audit timestamp.  Returns sync_complete when no new
%% audit entries exist.
get_source_events(#state{srcImemSess = Session, sbucket = SrcBucket, isSLocal = IsSLocal,
                         audit_start_time = LastStartTime} = State, BulkSize) ->
    %% pattern match instead of length/1 > 0: length/1 walks the whole list
    %% just to answer a non-emptiness question
    case State#state.rows of
        [_|_] = Rows -> {ok, Rows, State#state{rows = []}};
        [] ->
            case catch run_cmd(audit_readGT, [SrcBucket, LastStartTime, BulkSize],
                               IsSLocal, Session) of
                {error, _} = Error -> Error;
                {'EXIT', Error} -> {error, Error};
                [] ->
                    if State#state.first_sync == true ->
                        ?JInfo("Audit rollup is complete"),
                        {ok, sync_complete, State#state{first_sync = false}};
                       true -> {ok, sync_complete, State}
                    end;
                Audits ->
                    %% resume next batch after the newest audit entry seen
                    #{time := NextStartTime} = lists:last(Audits),
                    {ok, filter_keys(Audits, State), State#state{
                        audit_start_time = NextStartTime}}
            end
    end.

%% Cleanup support: page source keys (with hashes) strictly after CurKey,
%% bounded by the configured min/max key window.
load_src_after_key(CurKey, BlkCount,
                   #state{srcImemSess = SSession, sbucket = SrcBucket,
                          maxKey = MaxKey, minKey = MinKey, isSLocal = IsSLocal}) ->
    load_after_key(SrcBucket, CurKey, MaxKey, MinKey, BlkCount, IsSLocal, SSession).

%% Cleanup support: same paging over the destination bucket.
load_dst_after_key(CurKey, BlkCount,
                   #state{dstImemSess = DSession, dbucket = DstBucket,
                          maxKey = MaxKey, minKey = MinKey, isDLocal = IsDLocal}) ->
    load_after_key(DstBucket, CurKey, MaxKey, MinKey, BlkCount, IsDLocal, DSession).
+
+%% Queue the keys found different during cleanup so the next
+%% get_source_events/2 call re-syncs them; signal finish on the last batch.
+do_cleanup(Deletes, Inserts, Diffs, IsFinished, State) ->
    NewState = State#state{rows = Inserts ++ Diffs ++ Deletes},
    if IsFinished -> {ok, finish, NewState};
       true -> {ok, NewState}
    end.

%% Refresh phase is not supported by this worker.
do_refresh(_State, _) -> error(not_implemented).

%% Read a key's value from the source bucket (closing any previously logged
%% job error for that key first).
fetch_src(Key, #state{srcImemSess = Session, sbucket = Bucket, isSLocal = IsLocal}) ->
    dperl_dal:job_error_close(Key),
    get_value(<<"fetch_src">>, Session, IsLocal, Bucket, Key).

%% Read a key's value from the destination bucket.
fetch_dst(Key, #state{dstImemSess = Session, dbucket = Bucket, isDLocal = IsLocal}) ->
    %% fixed copy-paste bug: error op label was <<"fetch_src">>, which made
    %% destination read failures indistinguishable from source read failures
    get_value(<<"fetch_dst">>, Session, IsLocal, Bucket, Key).

%% Delete a key from the destination; returns {ErrorOccurred, State}.
delete_dst(Key, #state{dstImemSess = Session, dbucket = Bucket, isDLocal = IsLocal} = State) ->
    case run_cmd(delete, [Bucket, imem_datatype:term_to_io(Key)], IsLocal, Session) of
        {error, Error} ->
            ?JError("imem_dal_skvh:delete(~p,[~p]) ~p",
                    [Bucket, imem_datatype:term_to_io(Key), Error]),
            dperl_dal:job_error(Key, <<"sync">>, <<"delete">>, Error),
            {true, State};
        _ -> {false, State}
    end.

%% SKVH write is an upsert, so insert delegates to update.
insert_dst(Key, Val, State) -> update_dst(Key, Val, State).

%% Write (upsert) a key/value into the destination; returns {ErrorOccurred, State}.
update_dst(Key, Val, #state{dstImemSess = Session, dbucket = Bucket, isDLocal = IsLocal} = State) ->
    case catch run_cmd(write, [Bucket, Key, Val], IsLocal, Session) of
        {error, Error} ->
            ?JError("imem_dal_skvh:write(~p,~p,~p) ~p",
                    [Bucket, Key, Val, Error]),
            dperl_dal:job_error(Key, <<"sync">>, <<"update_dst">>, Error),
            {true, State};
        _ -> {false, State}
    end.

%% Per-key status reporting is not used by this worker.
report_status(_Key, _Status, _State) -> no_op.

%% Status published to dperlNodeJobDyn: the audit resume timestamp.
get_status(#state{audit_start_time = LastAuditTime}) ->
    #{lastAuditTime => LastAuditTime}.

%% Rebuild initial state from persisted node-job dynamic records, resuming at
%% the first lastAuditTime found; fresh state otherwise.
init_state([]) -> #state{};
init_state([#dperlNodeJobDyn{state = #{lastAuditTime := LastAuditTime}} | _]) ->
    #state{audit_start_time = LastAuditTime};
init_state([_ | Others]) ->
    init_state(Others).
+
+%% dperl_worker init: read source/destination bucket configuration from the
+%% job record; minKey/maxKey default to the full SKVH key range.
+init({#dperlJob{name=Name, srcArgs = SrcArgs, dstArgs = DstArgs}, State}) ->
    ?JInfo("Starting ~s...", [Name]),
    DBucketOpts = maps:get(dbucketOpts, DstArgs, [audit,history]),
    State1 = load_src_args(SrcArgs, State),
    State2 = load_dst_args(DstArgs, State1),
    MinKey = maps:get(minKey, SrcArgs, -1),
    MaxKey = maps:get(maxKey, SrcArgs, [<<255>>]),
    {ok, State2#state{
           name = Name, maxKey = MaxKey, minKey = MinKey,
           dbucketOpts = DBucketOpts}};
init({Args, _}) ->
    ?JError("bad start parameters ~p", [Args]),
    {stop, badarg}.

%% No synchronous API; log and acknowledge.
handle_call(Request, _From, State) ->
    ?JWarn("Unsupported handle_call ~p", [Request]),
    {reply, ok, State}.

%% No cast API; log and continue.
handle_cast(Request, State) ->
    ?JWarn("Unsupported handle_cast ~p", [Request]),
    {noreply, State}.

%% No expected messages; log and continue.
handle_info(Request, State) ->
    ?JWarn("Unsupported handle_info ~p", [Request]),
    {noreply, State}.

%% Close both imem sessions (when open) on shutdown; failures are recorded as
%% job errors rather than crashing the terminate path.
terminate(Reason, #state{srcImemSess = SrcSession, dstImemSess = DstSession}) ->
    try
        [Session:close() || Session <- [SrcSession, DstSession], Session /= undefined],
        ?JInfo("terminate ~p", [Reason])
    catch
        _:Error ->
            dperl_dal:job_error(<<"terminate">>, <<"terminate">>, Error),
            ?JError("terminate ~p:~p ~p",
                    [Reason, Error, erlang:get_stacktrace()])
    end.

%% No state migration required.
code_change(OldVsn, State, Extra) ->
    ?JInfo("code_change ~p: ~p", [OldVsn, Extra]),
    {ok, State}.

%% Log process dictionary, expose state unchanged.
format_status(Opt, [PDict, State]) ->
    ?JInfo("format_status ~p: ~p", [Opt, PDict]),
    State.

%% Validate or (re)establish an imem session for the link at ActiveLink.
%% A cheap schema query probes the old session; on mismatch/failure the old
%% session is closed and a fresh connection is attempted, returning the next
%% link index to try on error.
connect_imem(_ActiveLink, [], _User, _Password, _OldSession) -> {error, no_links, -1};
connect_imem(ActiveLink, Links, User, Password, OldSession) ->
    #{schema := Schema} = lists:nth(ActiveLink, Links),
    case catch OldSession:run_cmd(schema, []) of
        Schema -> {ok, OldSession};
        _ ->
            catch OldSession:close(),
            case dperl_dal:connect_imem_link(
                   ActiveLink, Links, User, Password) of
                {ok, Session, _} ->
                    {ok, Session};
                {Error, NewActiveLink} ->
                    {error, Error, NewActiveLink}
            end
    end.
+
+%% Create the SKVH channel if it does not exist yet; errors are logged and
+%% returned but deliberately do not stop the caller.
+check_channel(Session, IsLocal, Bucket, BucketOpts) ->
    case catch run_cmd(create_check_skvh, [Bucket, BucketOpts], IsLocal, Session) of
        {'EXIT', Error} -> {error, Error};
        {error, Error} ->
            ?JError("imem_dal_skvh:create_check_skvh(~p,~p) ~p",
                    [Bucket, BucketOpts, Error]),
            {error, Error};
        _ -> ok
    end.

%% Read one key from a bucket; Op is only used to label a job error on failure.
get_value(Op, Session, IsLocal, Bucket, Key) ->
    case run_cmd(read, [Bucket, [Key]], IsLocal, Session) of
        [] -> ?NOT_FOUND;
        [#{cvalue := Value}] -> Value;
        {error, Error} ->
            ?JError("imem_dal_skvh:read(~p,~p) ~p", [Bucket, [Key], Error]),
            dperl_dal:job_error(Key, <<"sync">>, Op, Error),
            error
    end.

%% Source config: either a local channel, or credentials + links for a remote
%% imem instance plus the remote bucket name.
load_src_args(#{channel := SChannel}, State) ->
    State#state{sbucket = to_binary(SChannel), isSLocal = true};
load_src_args(#{credential := SrcCred, links := SrcLinks, sbucket := SBucket}, State) ->
    State#state{sbucket = to_binary(SBucket), srcCred = SrcCred, srcLinks = SrcLinks}.

%% Destination config: symmetric to load_src_args/2.
load_dst_args(#{channel := DChannel}, State) ->
    State#state{dbucket = to_binary(DChannel), isDLocal = true};
load_dst_args(#{credential := DstCred, links := DstLinks, dbucket := DBucket}, State) ->
    State#state{dbucket = to_binary(DBucket), dstCred = DstCred, dstLinks = DstLinks}.

%% Normalize a channel name to binary; any other term passes through.
%% NOTE(review): the is_binary clause and the catch-all are identical — the
%% middle clause is redundant but harmless.
to_binary(Channel) when is_list(Channel) -> list_to_binary(Channel);
to_binary(Channel) when is_binary(Channel) -> Channel;
to_binary(Channel) -> Channel.

%% Dispatch an imem_dal_skvh command either locally (system account) or
%% through the remote imem session.
run_cmd(Cmd, Args, true, _) -> erlang:apply(imem_dal_skvh, Cmd, [system | Args]);
run_cmd(Cmd, Args, _, Session) -> Session:run_cmd(dal_exec, [imem_dal_skvh, Cmd, Args]).
+
+%% Page keys+hashes from Bucket strictly after CurKey up to MaxKey.  When
+%% CurKey equals MinKey (start of a pass) the first row is included; an
+%% integer CurKey (initial sentinel) restarts the scan at MinKey.
+load_after_key(Bucket, CurKey, MaxKey, MinKey, BlkCount, IsLocal, Session) when MinKey == CurKey ->
    get_key_hashes(Bucket, CurKey, MaxKey, BlkCount, IsLocal, Session, MinKey == CurKey);
load_after_key(Bucket, CurKey, MaxKey, MinKey, BlkCount, IsLocal, Session) when is_integer(CurKey) ->
    load_after_key(Bucket, MinKey, MaxKey, MinKey, BlkCount, IsLocal, Session);
load_after_key(Bucket, CurKey, MaxKey, MinKey, BlkCount, IsLocal, Session) ->
    get_key_hashes(Bucket, CurKey, MaxKey, BlkCount, IsLocal, Session, IsLocal == IsLocal andalso MinKey == CurKey).

%% Fetch Count+1 {Key, Hash} pairs starting at FromKey; when FromKey itself is
%% returned it is kept only on the first page, otherwise dropped so paging
%% does not repeat the boundary key.
get_key_hashes(Bucket, FromKey, MaxKey, Count, IsLocal, Session, IsFirstTime) ->
    case catch run_cmd(readGELTHashes, [Bucket, FromKey, MaxKey, Count + 1],
                       IsLocal, Session) of
        [{FromKey, _} | Keys] = KeyHashes ->
            if IsFirstTime -> lists:sublist(KeyHashes, Count);
               true -> Keys
            end;
        KeyHashes when is_list(KeyHashes) -> lists:sublist(KeyHashes, Count);
        Error -> error(Error)
    end.

%% Extract the keys inside the [MinKey, MaxKey] window from audit entries,
%% deduplicated while preserving order of last occurrence (foldr from the
%% right keeps the newest mention).
%% NOTE(review): lists:member inside the fold is O(n^2) in batch size —
%% acceptable for typical bulk sizes, revisit for very large batches.
filter_keys(Keys, #state{minKey = MinKey, maxKey = MaxKey}) ->
    lists:foldr(
      fun(#{ckey := Key}, Acc) when Key >= MinKey andalso Key =< MaxKey ->
              case lists:member(Key, Acc) of
                  true -> Acc;
                  false -> [Key | Acc]
              end;
         (_, Acc) -> Acc
      end, [], Keys).
\ No newline at end of file
diff --git a/src/dperl_status.hrl b/src/dperl_status.hrl
new file mode 100644
index 00000000..0546dced
--- /dev/null
+++ b/src/dperl_status.hrl
@@ -0,0 +1,57 @@
+-ifndef(_dperl_STATUS_HRL_).
+-define(_dperl_STATUS_HRL_, true).
+
+-include("dperl.hrl").
+ +-record(context, { + name :: list(), + channel :: binary(), + aggr_channel :: binary(), + metrics :: map(), + focus :: map(), + heart_interval = 10000 :: integer(), + aggregators = #{} :: map(), + node_collector :: list(), + stale_time :: integer(), + cpu_overload_cnt = 0 :: integer(), + mem_overload_cnt = 0 :: integer(), + eval_suspend_cnt = 0 :: integer(), + errors = [] :: list(), + received_cnt = 0 :: integer(), + requested_cnt = 0 :: integer(), + success_cnt = 0 :: integer(), + sync_cnt = 0 :: integer(), + active_link = 1 :: integer(), + imem_sess = undefined :: undefined | tuple(), + connection = #{} :: map(), + target_node :: atom(), + requested_metrics = #{} :: map(), %% list of metrics that have been requested + debug = none :: atom(), %% none or detail or simple + is_loaded = false :: boolean(), + sync_only = false :: boolean() %% set to true when refresh and cleanup are false + }). + +-define(STALE_TIME(__MODULE, __JOB_NAME), + ?GET_CONFIG(cleanupStaleTimeout, [__MODULE, __JOB_NAME], 60000, "Cleanup Threshold time to delete rows after checking heartbeat") + ). + +-define(MAX_MEMORY_THRESHOLD(__NODE), + ?GET_CONFIG(maxMemoryThreshold, [__NODE], 70, "Memory Threshold percentage above which should cause high memory error, configurable per node") + ). + +-define(METRIC_WAIT_THRESHOLD(__JOB_NAME), + ?GET_CONFIG(metricWaitTimeout, [__JOB_NAME], 300, "Max wait time for the agent to respond to a metric request is in seconds") + ). + +-define(METRICS_TAB(__NAME), list_to_atom("metrics_tab_" ++ __NAME)). + +-define(AGGR, <<"_AGGR">>). %% Aggregator skvh table ending. user with dashboard table ex: dperlS_AGGR + +-record(dperl_metrics, {key :: tuple(), + agg_round :: list(), + agg_precise :: list(), + state :: term(), + time :: ddTimestamp() + }). + +-endif. diff --git a/src/dperl_status_agr.erl b/src/dperl_status_agr.erl new file mode 100644 index 00000000..fe17251e --- /dev/null +++ b/src/dperl_status_agr.erl @@ -0,0 +1,740 @@ +-module(dperl_status_agr). 
+
+-include("dperl_status.hrl").
+
+-export([sync_puller_error/5, check_nodes/5, merge_node_errors/5,
         check_memory_on_nodes/5, check_heartbeats/5, check_job_error/5,
         check_job_down/5, write_to_all/5, write_to_aggr/5, write_to_channel/5,
         format_system_memory/5, sessions_rate/5, restricted_sessions_rate/5,
         upstream_requests_rate/5, downstream_requests_rate/5,
         focus_requests_rate/5, check_memory_on_node/5]).

-safe([sync_puller_error/5, check_nodes/5, merge_node_errors/5,
       check_memory_on_nodes/5, check_heartbeats/5, check_job_down/5,
       check_job_error/5, write_to_all/5, write_to_aggr/5, write_to_channel/5,
       format_system_memory/5, sessions_rate/5, restricted_sessions_rate/5,
       upstream_requests_rate/5, downstream_requests_rate/5,
       focus_requests_rate/5, check_memory_on_node/5]).

-define(PUSHER_MAP_CACHE_TIMEOUT, 300). %% 5 minutes after which the job map in the state is rebuilt

%% Node level aggregator
%% Enrich job_status rows of pusher jobs with the status of their upstream
%% puller(s).  The puller/pusher topology (build_job_map/1) is cached in the
%% aggregator state and rebuilt after ?PUSHER_MAP_CACHE_TIMEOUT seconds.
sync_puller_error({_, {job_status, _}}, {Infos, _T}, #context{name = Name, channel = Channel}, {_, _, State, _}, _Opts) ->
    NewState =
        case State of
            #{time := Time} ->
                case imem_datatype:sec_diff(Time, imem_meta:time()) < ?PUSHER_MAP_CACHE_TIMEOUT of
                    true -> State;
                    false -> #{pushers => build_job_map(Channel), time => imem_meta:time()}
                end;
            _ -> #{pushers => build_job_map(Channel), time => imem_meta:time()}
        end,
    Pushers = maps:get(pushers, NewState, #{}),
    NewInfos =
        lists:map(
          %% stopped jobs are passed through untouched
          fun(#{cvalue := #{status := stopped}} = Info) -> Info;
             (#{ckey := [_, _, _, "job_status", Job], cvalue := Status} = Info) ->
                  case Pushers of
                      #{Job := puller} -> Info;
                      #{Job := Pullers} ->
                          Info#{cvalue => update_push_status(Pullers, Name, Channel, Status)};
                      _ -> Info
                  end;
             (Info) -> Info
          end, Infos),
    {NewInfos, [], NewState}.
+
+%% Node level aggregator
+%% Compares the reported erlang/data nodes against the required node list and
+%% emits one error row per node that is missing required peers.
+check_nodes({_, MKey}, {Infos, _T}, #context{channel = Channel}, {_, _, State, _}, Opts)
+ when MKey == erlang_nodes; MKey == data_nodes->
+ AggrMetrics =
+ lists:foldl(
+ fun(#{ckey := [_Pla, "system_info", Node, "system_info", "erlang_nodes"] = Key,
+ cvalue := #{nodes := Nodes, required_nodes := RNodes}}, Acc) ->
+ check_nodes_internal(Channel, Key, Node, Nodes, RNodes) ++ Acc;
+ (#{ckey := [_Pla, "system_info", Node, "system_info", "data_nodes"] = Key,
+ cvalue := #{data_nodes := DNodes, required_nodes := RNodes}}, Acc) ->
+ Nodes = [N || #{node := N} <- DNodes],
+ check_nodes_internal(Channel, Key, Node, Nodes, RNodes) ++ Acc;
+ (_, Acc) -> Acc
+ end, [], Infos),
+ case Opts of
+ #{write_to_channel := true} -> {AggrMetrics, AggrMetrics, State};
+ _ -> {[], AggrMetrics, State}
+ end.
+
+%% Master aggregator
+%% Collapses per-node error maps into a single deduplicated error row.
+merge_node_errors({_, {agr, node_error, _}}, {[#{cvalue := NodeErrors}], _T}, #context{channel = Channel}, {_, _, State, _}, _Opts) ->
+ ErrorKey = [binary_to_list(Channel), "system_info", "aggregation", "error", "nodes"],
+ NErrors =
+ case lists:usort(maps:values(NodeErrors)) of
+ [] -> [];
+ [Error] -> [#{ckey => ErrorKey, cvalue => #{error => Error}}];
+ Errors -> [#{ckey => ErrorKey, cvalue => #{error => Errors}}]
+ end,
+ {NErrors, [], State}.
+
+%% Master aggregator
+%% Emits one aggregated error row listing all nodes above their memory threshold.
+check_memory_on_nodes({_, {agr, node_memory, _}}, {[#{cvalue := NodeStatuses}], _T}, #context{channel = Channel}, {_, _, State, _}, _Opts) ->
+ NodeMemErrors =
+ maps:fold(
+ fun(Node, Value, Acc) ->
+ case check_memory(Node, Value) of
+ false -> Acc;
+ Mem -> Acc#{Node => Mem}
+ end
+ end, #{}, NodeStatuses),
+ if map_size(NodeMemErrors) == 0 -> {[], [], State};
+ true ->
+ ErrorKey = [binary_to_list(Channel), "system_info", "aggregation", "error", "memory"],
+ {[#{ckey => ErrorKey, cvalue => #{error => <<"Memory too high">>, info => NodeMemErrors}}], [], State}
+ end.
+
+%% Node level aggregator
+%% Emits a memory error row for this node when its used memory exceeds the
+%% configured ?MAX_MEMORY_THRESHOLD.
+check_memory_on_node({_, system_information}, {[#{ckey:= Key, cvalue := Value}], _T}, _Ctx, {_, _, State, _}, _Opts) ->
+ [Pla, "system_info", Node, "system_info", "node_status"] = Key,
+ case check_memory(Node, Value) of
+ false -> {[], [], State};
+ Mem ->
+ {[#{ckey => [Pla, "system_info", Node, "error", "memory"],
+ cvalue => #{error => <<"Memory too high">>, info => Mem}}], [], State}
+ end.
+
+%% Master aggregator
+%% Reports nodes of the collector job whose heartbeat is older than StaleTime.
+%% Nodes with a fresh heartbeat are removed from the candidate list; the rest
+%% (including nodes with no heartbeat entry at all) are reported missing.
+check_heartbeats({_, {agr, heartbeat, _}}, {[#{cvalue := Heatbeats}], _T}, #context{channel = Channel,
+ node_collector= NodeCollecotor, stale_time = StaleTime}, {_, _, State, _}, _Opts) ->
+ {[Nodes], true} = imem_meta:select(dperlJob, [{#dperlJob{name = list_to_binary(NodeCollecotor), nodes = '$1', _ = '_'}, [], ['$1']}]),
+ MissingNodes = maps:fold(
+ fun(Node, Time, Acc) ->
+ case erlang:system_time(milli_seconds) - Time of
+ T when T > StaleTime -> Acc;
+ _ -> lists:delete(Node, Acc)
+ end
+ end, Nodes, Heatbeats),
+ case MissingNodes of
+ [] -> {[], [], State};
+ MissingNodes ->
+ ErrorKey = [binary_to_list(Channel), "system_info", "aggregation", "error", "heartbeat"],
+ {[#{ckey => ErrorKey, cvalue => #{error => <<"Missing heartbeat">>, info => MissingNodes}}], [], State}
+ end.
+
+%% Master aggregator
+%% Aggregates per-node error counts into a single error row (nodes with 0 errors
+%% are skipped).
+check_job_error({_, {agr, error_count, _}}, {[#{cvalue := NodeErrors}], _T}, #context{channel = Channel}, {_, _, State, _}, _Opts) ->
+ ErrorMap = maps:fold(
+ fun(Node, Count, Acc) when Count > 0 -> Acc#{Node => Count};
+ (_, _, Acc) -> Acc
+ end, #{}, NodeErrors),
+ if map_size(ErrorMap) > 0 ->
+ ErrorKey = [binary_to_list(Channel), "system_info", "aggregation", "error", "job_error_count"],
+ {[#{ckey => ErrorKey, cvalue => #{error => <<"Jobs with errors">>, info => ErrorMap}}], [], State};
+ true -> {[], [], State}
+ end.
+
+%% Master aggregator
+%% Aggregates per-node job_down_count metrics into a single error row on the
+%% aggregation key when any node reports downed jobs.
+check_job_down({_, {job_down_count, _}}, {Infos, _T}, #context{channel = ChannelBin}, {_, _, State, _}, _Opts) ->
+ Channel = binary_to_list(ChannelBin),
+ DownMap = lists:foldl(
+ fun(#{ckey := [_Pla, "system_info", Node, "system_info", "job_down_count"],
+ cvalue := #{job_down_count := Count}}, Acc) when Count > 0 ->
+ %% BUGFIX: the accumulator was previously discarded here
+ %% (clause head matched `_` and returned a fresh map), so
+ %% only the last matching node survived; accumulate like
+ %% the sibling check_job_error/5 does.
+ Acc#{list_to_binary(Node) => Count};
+ (_, Acc) -> Acc
+ end, #{}, Infos),
+ if map_size(DownMap) > 0 ->
+ ErrorKey = [Channel, "system_info", "aggregation", "error", "job_down_count"],
+ {[#{ckey => ErrorKey, cvalue => #{error => <<"Down jobs">>, info => DownMap}}], [], State};
+ true -> {[], [], State}
+ end.
+
+%% Rounds the memory figures of the system_information metric to MB strings
+%% for display; the raw row is still forwarded to the aggregate channel.
+format_system_memory({_, system_information}, {[#{cvalue := Value} = Info], _T}, _Ctx, {_, _, State, _}, _Opts) ->
+ #{free_memory := FreeMemory,
+ total_memory := TotalMemory,
+ erlang_memory := ErlMemory} = Value,
+ ValueRounded = Value#{free_memory => bytes_to_mb(FreeMemory),
+ total_memory => bytes_to_mb(TotalMemory),
+ erlang_memory => bytes_to_mb(ErlMemory)},
+ {[Info#{cvalue => ValueRounded}], [Info], State}.
+
+%% Routing helpers: result tuple is {ChannelRows, AggrRows, State}.
+write_to_all({_, _}, {Infos, _T}, _Ctx, {_, _, State, _}, _Opts) -> {Infos, Infos, State}. % system_information
+
+write_to_aggr({_, _}, {Infos, _T}, _Ctx, {_, _, State, _}, _Opts) -> {[], Infos, State}. % job_error_count
+
+write_to_channel({_, _}, {Infos, _T}, _Ctx, {_, _, State, _}, _Opts) -> {Infos, [], State}. % focus, jobs, errors
+
+%% mpro aggregators
+sessions_rate({mpro_metrics, {sessions, _}}, {[#{ckey := Key, cvalue := #{sessions := Sessions}}], Time},
+ _Ctx, {_, _, State, LastTime}, Opts) ->
+ calculate_rate(sessions_rate, Sessions, LastTime, Time, Key, check_opts(Opts), State).
+
+restricted_sessions_rate({mpro_metrics, {restricted_sessions, _}}, {[#{ckey := Key, cvalue := #{restricted_sessions := RSessions}}], Time},
+ _Ctx, {_, _, State, LastTime}, Opts) ->
+ calculate_rate(restricted_sessions_rate, RSessions, LastTime, Time, Key, check_opts(Opts), State).
+
+upstream_requests_rate({mpro_metrics, {upstream_requests, _}}, {[#{ckey := Key, cvalue := #{upstream_requests := Requests}}], Time},
+ _Ctx, {_, _, State, LastTime}, Opts) ->
+ calculate_rate(upstream_requests_rate, Requests, LastTime, Time, Key, check_opts(Opts), State).
+
+downstream_requests_rate({mpro_metrics, {downstream_requests, _}}, {[#{ckey := Key, cvalue := #{downstream_requests := Requests}}], Time},
+ _Ctx, {_, _, State, LastTime}, Opts) ->
+ calculate_rate(downstream_requests_rate, Requests, LastTime, Time, Key, check_opts(Opts), State).
+
+%% Computes per-protocol request rates for a focus subscription; the protocol
+%% is the last element of each row key.
+-spec focus_requests_rate({atom, {atom, list(), list()}}, {list(), term()}, #context{}, tuple(), map()) -> {list(), list(), term()}.
+focus_requests_rate({mpro_metrics, {focus, _Topic, _FocusKey}}, {[], _Time}, _Ctx, {_, _, State, _}, _Opts) ->
+ {[], [], State};
+focus_requests_rate({mpro_metrics, {focus, _Topic, _FocusKey}}, {Rows, Time}, _Ctx, {_, _, State, LastTime}, Opts) ->
+ {Rates, NewState} = lists:foldl(
+ fun(#{ckey := K, cvalue := V} = I, {AccRates, AccState}) ->
+ {RateValue, RateState} = calculate_focus_rate(lists:last(K), V, AccState, LastTime, Time, check_opts(Opts)),
+ {[I#{cvalue => RateValue} | AccRates], RateState}
+ end,
+ {[], State}, Rows),
+ {Rates, [], NewState}.
+
+%% Internal helper functions
+%% Returns the puller job names feeding the given channel, or the atom
+%% 'puller' when the job itself is a puller (no pushers target the channel).
+find_pullers(Channel) ->
+ case imem_meta:select(dperlJob, [{#dperlJob{name = '$1', dstArgs = '$2', _ = '_'},
+ [{'==', '$2', #{channel => binary_to_list(Channel)}}], ['$1']}]) of
+ {[], true} -> puller;
+ {Pullers, true} -> Pullers
+ end.
+
+-spec update_push_status([binary()], list(), binary(), map()) -> map().
+update_push_status(Pullers, CollectorName, Channel, StatusValue) ->
+ Rows = imem_dal_skvh:read_deep(system, Channel, [[CollectorName]]),
+ update_push_status(get_error_jobs(Rows), Pullers, StatusValue).
+
+-spec update_push_status([binary()], [binary()], map()) -> map().
+%% Sets status => error on the pusher when any of its pullers is in error.
+update_push_status([], _Pullers, StatusValue) -> StatusValue;
+update_push_status([ErrorJob | Rest], Pullers, StatusValue) ->
+ case lists:member(ErrorJob, Pullers) of
+ true -> StatusValue#{status => error};
+ false -> update_push_status(Rest, Pullers, StatusValue)
+ end.
+
+%% Names of jobs whose JSON status value decodes to "error".
+-spec get_error_jobs([map()]) -> [binary()].
+get_error_jobs([]) -> [];
+get_error_jobs([#{ckey := [_, _, _, _, _, "job_status", Name], cvalue := ValueBin} | Rest]) ->
+ case imem_json:decode(ValueBin, [return_maps]) of
+ #{<<"status">> := <<"error">>} -> [list_to_binary(Name) | get_error_jobs(Rest)];
+ _ -> get_error_jobs(Rest)
+ end;
+get_error_jobs([_Row | Rest]) -> get_error_jobs(Rest).
+
+%% Error row for required nodes not present in the reported node list;
+%% first clause short-circuits when both lists are identical.
+check_nodes_internal(_Channel, _Key, _Node, Nodes, Nodes) -> [];
+check_nodes_internal(Channel, Key, Node, Nodes, RequiredNodes) ->
+ case RequiredNodes -- Nodes of
+ [] -> [];
+ MissingNodes ->
+ ErrorKey = [binary_to_list(Channel), "system_info", Node, "error", lists:last(Key)],
+ Error = #{error => list_to_binary(io_lib:format("Missing nodes ~p", [MissingNodes]))},
+ [#{ckey => ErrorKey, cvalue => Error}]
+ end.
+
+bytes_to_mb(Bytes) -> list_to_binary(lists:concat([Bytes div 1000000, "MB"])).
+
+%% Rate state machine: the first sample only records the value, the second
+%% establishes a baseline rate, subsequent samples emit a rate row (rounded
+%% for the channel, precise for the aggregate) with optional exponential
+%% smoothing via exp_factor.
+calculate_rate(_RateName, V2, undefined, _T2, _Key, _Opts, State) ->
+ {[], [], State#{lastValue => V2, lastRate => undefined}};
+calculate_rate(_RateName, V2, T1, T2, _Key, _Opts, #{lastRate := undefined} = State) ->
+ V1 = maps:get(lastValue, State, 0),
+ Rate = calculate_rate(V1, V2, T1, T2),
+ {[], [], State#{lastValue => V2, lastRate => Rate}};
+calculate_rate(RateName, V2, T1, T2, Key, #{exp_factor := Factor}, #{lastRate := LRate} = State) when is_number(LRate) ->
+ V1 = maps:get(lastValue, State, 0),
+ RateKey = lists:sublist(Key, 3) ++ [atom_to_list(RateName), []],
+ Rate = apply_factor(calculate_rate(V1, V2, T1, T2), LRate, Factor),
+ RoundVal = [#{ckey => RateKey, cvalue => #{RateName => round(Rate)}}],
+ NewState = State#{lastValue => V2, lastRate => Rate},
+ {RoundVal, [#{ckey => RateKey, cvalue => #{RateName => Rate}}], NewState};
+calculate_rate(_RateName, V2, _T1, _T2, _Key, _Opts, State) ->
+ {[], [], State#{lastValue => V2, lastRate => undefined}}.
+
+%% Delta per second between two counter samples.
+calculate_rate(Val, Val, _T1, _T2) -> 0;
+calculate_rate(V1, V2, T1, T2) ->
+ (V2 - V1) * (1000000 / imem_datatype:musec_diff(T1, T2)).
+
+%% Normalizes exp_factor: only floats in [0.5, 1) are kept, else no smoothing.
+-spec check_opts(map()) -> map().
+check_opts(#{exp_factor := 1} = Opts) -> Opts;
+check_opts(#{exp_factor := Factor} = Opts) when is_float(Factor),
+ Factor >= 0.5, Factor < 1 -> Opts;
+check_opts(Opts) -> Opts#{exp_factor => 1}.
+
+%% Exponential smoothing: Factor weighs the new rate, (1-Factor) the old.
+-spec apply_factor(number(), number(), number()) -> number().
+apply_factor(Rate, _OldRate, 1) -> Rate;
+apply_factor(Rate, OldRate, Factor) ->
+ Rate * Factor + OldRate * (1-Factor).
+
+%% Scans the channel table inside a transaction and maps each job name to
+%% either 'puller' or the list of pullers feeding its source channel.
+-spec build_job_map(binary()) -> map().
+build_job_map(Channel) ->
+ TableName = imem_dal_skvh:atom_table_name(Channel),
+ JobMapFun = fun() ->
+ FirstKey = imem_meta:first(TableName),
+ build_job_map(TableName, FirstKey, #{})
+ end,
+ case imem_meta:transaction(JobMapFun) of
+ {atomic, NewAcc} -> NewAcc;
+ ErrorResult ->
+ ?JError("Error building job map: ~p", [ErrorResult]),
+ #{}
+ end.
+
+%% Table walk over the skvh channel; push jobs are mapped to the pullers of
+%% their source channel, everything else to 'puller'.
+-spec build_job_map(atom(), term() | atom(), map()) -> map().
+build_job_map(_TableName, '$end_of_table', Acc) -> Acc;
+build_job_map(TableName, CurKey, Acc) ->
+ [RawRow] = imem_meta:read(TableName, CurKey),
+ NewAcc =
+ case imem_dal_skvh:skvh_rec_to_map(RawRow) of
+ #{ckey := [_, _, _, _, "cluster", "jobs", Job], cvalue := Val} ->
+ case imem_json:decode(Val, [return_maps]) of
+ #{<<"direction">> := <<"push">>,
+ <<"srcArgs">> := #{<<"channel">> := SChannel}} ->
+ Pullers = find_pullers(SChannel),
+ Acc#{Job => Pullers};
+ _ -> Acc#{Job => puller}
+ end;
+ _ -> Acc
+ end,
+ NextKey = imem_meta:next(TableName, CurKey),
+ build_job_map(TableName, NextKey, NewAcc).
+
+%% All key paths to the leaf values of a nested map, depth first.
+-spec path_to_values(map()) -> [[term()]].
+path_to_values(Map) ->
+ path_to_values(maps:keys(Map), Map, []).
+
+-spec path_to_values(list(), map(), list()) -> [[term()]].
+path_to_values([], _, _Acc) -> [];
+path_to_values([Key | Keys], Map, Acc) ->
+ NewAcc = [Key | Acc],
+ S = case Map of
+ #{Key := Value} when is_map(Value) ->
+ path_to_values(maps:keys(Value), Value, NewAcc);
+ _ -> [lists:reverse(NewAcc)]
+ end,
+ S ++ path_to_values(Keys, Map, Acc).
+
+-spec calculate_focus_rate(list(), map(), map(), term(), term(), map()) -> {map(), map()}.
+%% Per-protocol focus rates, keeping last values/rates per protocol in State;
+%% on the first sample (LastTime == undefined) only the value is recorded.
+calculate_focus_rate(Protocol, Value, State, undefined, Time, Opts) when map_size(State) =:= 0 ->
+ calculate_focus_rate(Protocol, Value, #{lastValue => #{}, lastRate => #{}}, undefined, Time, Opts);
+calculate_focus_rate(Protocol, Value, #{lastValue := LValue} = State, undefined, _Time, _Opts) ->
+ {Value, State#{lastValue => LValue#{Protocol => Value}}};
+calculate_focus_rate(Protocol, Value, #{lastValue := LValue, lastRate := LRate} = State, LastTime, Time, Opts) ->
+ ProtLValue = maps:get(Protocol, LValue, #{}),
+ ProtLRate = maps:get(Protocol, LRate, #{}),
+ Paths = path_to_values(Value),
+ {RateValue, NewProtRate} = calculate_protocol_rate(Paths, Value, ProtLValue, ProtLRate, LastTime, Time, Opts),
+ NewState = State#{lastRate => LRate#{Protocol => NewProtRate}, lastValue => LValue#{Protocol => Value}},
+ {add_total_focus_rate(Paths, RateValue), NewState}.
+
+%% First pass only seeds the rate map; later passes also write smoothed,
+%% rounded "<key>_rate" leaves into the value map.
+-spec calculate_protocol_rate(list(), map(), map(), map(), term(), term(), map()) -> {map(), map()}.
+calculate_protocol_rate(Paths, Value, ProtLValue, ProtLRate, T1, T2, _Opts) when map_size(ProtLRate) == 0 ->
+ NewRate =
+ lists:foldl(fun(Path, AccRate) ->
+ V1 = map_get(Path, ProtLValue, 0),
+ V2 = map_get(Path, Value, 0),
+ Rate = calculate_rate(V1, V2, T1, T2),
+ map_put(Path, Rate, AccRate)
+ end, ProtLRate, Paths),
+ {Value, NewRate};
+calculate_protocol_rate(Paths, Value, ProtLValue, ProtLRate, T1, T2, #{exp_factor := Factor}) ->
+ lists:foldl(fun(Path, {V, AccRate}) ->
+ V1 = map_get(Path, ProtLValue, 0),
+ V2 = map_get(Path, V, 0),
+ LRate = map_get(Path, AccRate, 0),
+ Rate = apply_factor(calculate_rate(V1, V2, T1, T2), LRate, Factor),
+ RatePath = focus_rate_path(Path),
+ {map_put(RatePath, round(Rate), V), map_put(Path, Rate, AccRate)}
+ end, {Value, ProtLRate}, Paths).
+
+%% Rewrites the last path element K into the binary key <<"K_rate">>.
+-spec focus_rate_path(list()) -> list().
+focus_rate_path(Path) ->
+ [R | PathRev] = lists:reverse(Path),
+ RateKey = <<(to_binary(R))/binary, <<"_rate">>/binary >>,
+ lists:reverse([RateKey | PathRev]).
+
+%% Deep lookup along a key path; Default when a segment is missing.
+-spec map_get(list(), term(), term()) -> term().
+map_get([], Value, _Default) -> Value;
+map_get([K | Rest], M, Default) when is_map(M) -> map_get(Rest, maps:get(K, M, Default), Default);
+map_get(_, _, Default) -> Default.
+
+%% Deep insert along a key path, creating intermediate maps as needed.
+-spec map_put(list(), term(), map()) -> map().
+map_put([], Value, _Map) -> Value;
+map_put([K | Rest], Value, Map) ->
+ case Map of
+ #{K := KMap} when is_map(KMap) -> Map#{K => map_put(Rest, Value, KMap)};
+ _ -> Map#{K => map_put(Rest, Value, #{})}
+ end.
+
+%% Sums esme request rates (excluding *resp* leaves) into total_rate; for
+%% non-esme values falls back to collect/submit rates.
+-spec add_total_focus_rate(list(), map()) -> map().
+add_total_focus_rate(Paths, #{esme := EsmeMap} = Value) ->
+ TotalRate =
+ lists:foldl(
+ fun([esme | Path], Total) ->
+ LastKey = lists:last(Path),
+ case binary:match(LastKey, <<"resp">>) of
+ nomatch ->
+ RatePath = focus_rate_path(Path),
+ Total + map_get(RatePath, EsmeMap, 0);
+ _ -> Total
+ end;
+ (_, Total) -> Total
+ end, 0, Paths),
+ Value#{total_rate => TotalRate};
+add_total_focus_rate(_Paths, Value) -> %% for focus of rest
+ TotalRate =
+ case Value of
+ #{del := #{del := #{<<"collect_rate">> := C}},
+ sub := #{sub := #{<<"submit_sm_rate">> := S}}} -> S + C;
+ #{del := #{del := #{<<"collect_rate">> := C}}} -> C;
+ #{sub := #{sub := #{<<"submit_sm_rate">> := S}}} -> S;
+ _ -> 0
+ end,
+ Value#{total_rate => TotalRate}.
+
+-spec to_binary(term()) -> binary().
+to_binary(Atom) when is_atom(Atom) -> atom_to_binary(Atom, utf8);
+to_binary(List) when is_list(List) -> list_to_binary(List);
+to_binary(Bin) when is_binary(Bin) -> Bin.
+
+%% Returns the rounded used-memory percentage when above the node threshold,
+%% otherwise false.
+%% NOTE(review): the -spec return type list() looks wrong - the clauses
+%% return integer() | false; verify and fix the spec.
+-spec check_memory(list(), map()) -> list().
+check_memory(Node, #{free_memory := FreeMemory, total_memory := TotalMemory}) ->
+ MaxMemory = ?MAX_MEMORY_THRESHOLD(Node),
+ UsedMemory = (100 - FreeMemory / TotalMemory * 100),
+ if UsedMemory >= MaxMemory -> erlang:round(UsedMemory);
+ true -> false
+ end;
+check_memory(_, _) -> false.
+
+%% ----- TESTS ------------------------------------------------
+-ifdef(TEST).
+
+-include_lib("eunit/include/eunit.hrl").
+
+apply_factor_test_() -> [
+ ?_assertEqual(10, apply_factor(10, 0, 1)),
+ ?_assertEqual(9.1, apply_factor(10, 1, 0.9)),
+ ?_assertEqual(5.5, apply_factor(6, 5, 0.5))
+].
+
+%% EUnit fixture for sync_puller_error/5; imem_meta and imem_dal_skvh are
+%% mocked with meck so no database is needed.
+sync_puller_error_test_() ->
+ Ctx = #context{channel = <<"TEST">>, name = <<"TESTCOLLECTOR">>},
+ Invalid = {metrics, {invalid_key, undefined}},
+ Key = {dperl_metrics, {job_status, []}},
+ EmptyInfos = {[], undefined},
+ EmptyPrev = {undefined, undefined, #{}, undefined},
+ StartSec = 10,
+ StartMicros = 123456,
+ StartTime = {StartSec, StartMicros},
+ State = #{pushers => #{}, time => StartTime},
+ EmptyResult = {[], [], State},
+ BasePrev = {undefined, undefined, State, undefined},
+ RefreshTime = {StartSec + ?PUSHER_MAP_CACHE_TIMEOUT + 1, StartMicros},
+ PushersMap = #{
+ "dperlSPullMaster" => puller,
+ "E2SM1" => puller,
+ "MBSN1" => puller,
+ "MBSN1Push" => [<<"MBSN1">>],
+ "MBSN2Push" => [<<"MBSN1">>],
+ "UAMPu" => [<<"UAMP">>]},
+ NewState = #{pushers => PushersMap, time => RefreshTime},
+ RefreshResult = {[], [], NewState},
+ Prev = {undefined, undefined, NewState, undefined},
+ RawPullerInfos = [
+ {"E2SM1", #{state => #{lastAuditTime => [0,0],lastRefreshKey => -1},status => error}},
+ {"MBSN1", #{state => #{lastAuditTime => [0,0],lastRefreshKey => -1},status => error}},
+ {"dperlSPullMaster", #{state =>
+ #{cleanup => #{
+ count => 5,
+ lastAttempt => [1494415670,385774],
+ lastSuccess => [1494415671,47635]
+ }},
+ status => idle}
+ }
+ ],
+ {PullerInfos, _} = dperl_status_pre:preprocess(Key, RawPullerInfos , undefined, node(), Ctx),
+ IdentityResult = {PullerInfos, [], NewState},
+ RawInfos = [
+ {"MBSN1Push", #{state => #{lastAuditTime => [0,0], lastRefreshKey => -1}, status => idle}},
+ {"UAMPu", #{state => #{lastAuditTime => [0,0], lastRefreshKey => -1}, status => idle}}
+ ],
+ {Infos, _} = dperl_status_pre:preprocess(Key, RawInfos, undefined, node(), Ctx),
+ Data = [
+ #{ckey => ["t","t@127","n","MBSN1","n","job_status","MBSN1"],
+ cvalue => <<"{\"status\":\"error\"}">>, chash => <<"h">>},
+ #{ckey => ["t","t@127","n","UAMP","n","job_status","UAMP"],
+ cvalue => <<"{\"status\":\"idle\"}">>, chash => <<"h">>}
+ ],
+ ErrorInfos = [I#{cvalue => V#{status => error}} || #{cvalue := V} = I <- Infos],
+ Result = {[hd(Infos) | tl(ErrorInfos)], [], NewState},
+ {foreach,
+ fun() ->
+ meck:new(imem_meta),
+ meck:new(imem_dal_skvh, [passthrough])
+ end,
+ fun(_) ->
+ meck:unload(imem_meta),
+ meck:unload(imem_dal_skvh)
+ end,
+ [
+ ?_assertException(error, function_clause, sync_puller_error(Invalid, {}, Ctx, {}, #{})),
+ {"Handles empty data",
+ fun() ->
+ meck:expect(imem_meta, transaction, 1, {atomic, #{}}),
+ meck:expect(imem_meta, time, 0, StartTime),
+ ?assertEqual(EmptyResult, sync_puller_error(Key, EmptyInfos, Ctx, EmptyPrev, #{})),
+ ?assert(meck:validate(imem_meta))
+ end},
+ {"Do not refresh before expiration",
+ fun() ->
+ meck:expect(imem_meta, time, 0, StartTime),
+ ?assertEqual(EmptyResult, sync_puller_error(Key, EmptyInfos, Ctx, BasePrev, #{})),
+ ?assert(meck:validate(imem_meta))
+ end},
+ {"Refresh list of pushers after expiration",
+ fun() ->
+ meck:expect(imem_meta, time, 0, RefreshTime),
+ meck:expect(imem_meta, transaction, 1, {atomic, PushersMap}),
+ ?assertEqual(RefreshResult, sync_puller_error(Key, EmptyInfos, Ctx, BasePrev, #{})),
+ ?assert(meck:validate(imem_meta))
+ end},
+ {"Should return same input if there are no pushers in the data",
+ fun() ->
+ meck:expect(imem_meta, time, 0, RefreshTime),
+ ?assertEqual(IdentityResult, sync_puller_error(Key, {PullerInfos, undefined}, Ctx, Prev, #{})),
+ ?assert(meck:validate(imem_meta))
+ end},
+ {"Should update the state of the pusher on error",
+ fun() ->
+ meck:expect(imem_meta, time, 0, RefreshTime),
+ meck:expect(imem_dal_skvh, read_deep, 3, Data),
+ ?assertEqual(Result, sync_puller_error(Key, {Infos, undefined}, Ctx, Prev, #{})),
+ ?assert(meck:validate(imem_meta)),
+ ?assert(meck:validate(imem_dal_skvh))
+ end}
+ ]
+ }.
+
+%% EUnit fixture for check_nodes/5 using preprocessed erlang/data node metrics.
+check_nodes_test_() ->
+ Ctx = #context{channel = <<"TEST">>, name = <<"TESTCOLLECTOR">>},
+ ErlangKey = {imem_metrics,erlang_nodes},
+ DataKey = {imem_metrics, data_nodes},
+ EmptyInfos = {[], undefined},
+ EmptyPrev = {undefined, undefined, undefined, undefined},
+ EmptyResult = {[], [], undefined},
+ RawErlangInfos = #{nodes => ['dperl1@127.0.0.1'], required_nodes => ['dperl1@127.0.0.1',dperl_missing@127]},
+ {ErlangInfos, _} = dperl_status_pre:preprocess(ErlangKey, RawErlangInfos, undefined, 'dperl1@127.0.0.1', Ctx),
+ ErlangResultRows = [
+ #{ckey => ["TEST","system_info","dperl1@127.0.0.1","error","erlang_nodes"],
+ cvalue => #{error => <<"Missing nodes [dperl_missing@127]">>}}
+ ],
+ ErlangResult = {[], ErlangResultRows, undefined},
+ RawDataInfos = #{data_nodes => [#{node => 'dperl1@127.0.0.1',schema => dperl}],
+ required_nodes => ['dperl1@127.0.0.1',dperl_missing@127]},
+ {DataInfos, _} = dperl_status_pre:preprocess(DataKey, RawDataInfos, undefined, 'dperl1@127.0.0.1', Ctx),
+ DataResultRows = [
+ #{ckey => ["TEST","system_info","dperl1@127.0.0.1","error","data_nodes"],
+ cvalue => #{error => <<"Missing nodes [dperl_missing@127]">>}}
+ ],
+ DataResult = {[], DataResultRows, undefined},
+ {foreach,
+ fun() ->
+ ok
+ end,
+ fun(_) ->
+ ok
+ end,
+ [
+ {"Should handle empty erlang nodes",
+ fun() ->
+ ?assertEqual(EmptyResult, check_nodes(ErlangKey, EmptyInfos, Ctx, EmptyPrev, #{}))
+ end},
+ {"Should handle empty data nodes",
+ fun() ->
+ ?assertEqual(EmptyResult, check_nodes(DataKey, EmptyInfos, Ctx, EmptyPrev, #{}))
+ end},
+ {"Should report erlang node errors",
+ fun() ->
+ ?assertEqual(ErlangResult, check_nodes(ErlangKey, {ErlangInfos, undefined}, Ctx, EmptyPrev, #{}))
+ end},
+ {"Should report data node errors",
+ fun() ->
+ ?assertEqual(DataResult, check_nodes(DataKey, {DataInfos, undefined}, Ctx, EmptyPrev, #{}))
+ end}
+ ]
+ }.
+
+merge_node_errors_test_() ->
+ Ctx = #context{channel = <<"TEST">>, name = <<"TESTCOLLECTOR">>},
+ Key = {dperl_metrics, {agr, node_error, []}},
+ EmptyInfos = {[#{cvalue => #{}}], undefined},
+ EmptyPrev = {undefined, undefined, undefined, undefined},
+ EmptyResult = {[], [], undefined},
+ {foreach,
+ fun() ->
+ ok
+ end,
+ fun(_) ->
+ ok
+ end,
+ [
+ {"Handles empty data",
+ fun() ->
+ ?assertEqual(EmptyResult, merge_node_errors(Key, EmptyInfos, Ctx, EmptyPrev, #{}))
+ end}
+ ]
+ }.
+
+check_memory_on_nodes_test_() ->
+ Ctx = #context{channel = <<"TEST">>, name = <<"TESTCOLLECTOR">>},
+ Key = {dperl_metrics, {agr, node_memory, []}},
+ EmptyInfos = {[#{cvalue => #{}}], undefined},
+ EmptyPrev = {undefined, undefined, undefined, undefined},
+ EmptyResult = {[], [], undefined},
+ {foreach,
+ fun() ->
+ ok
+ end,
+ fun(_) ->
+ ok
+ end,
+ [
+ {"Handles empty data",
+ fun() ->
+ ?assertEqual(EmptyResult, check_memory_on_nodes(Key, EmptyInfos, Ctx, EmptyPrev, #{}))
+ end}
+ ]
+ }.
+
+% check_heartbeats_test_() ->
+% Ctx = #context{channel = <<"TEST">>, name = <<"TESTCOLLECTOR">>},
+% Key = {dperl_metrics, {agr, heartbeat, []}},
+% EmptyInfos = {[#{cvalue => #{}}], undefined},
+% EmptyPrev = {undefined, undefined, undefined, undefined},
+% EmptyResult = {[], [], undefined},
+% {foreach,
+% fun() ->
+% ok
+% end,
+% fun(_) ->
+% ok
+% end,
+% [
+% {"Handles empty data",
+% fun() ->
+% ?assertEqual(EmptyResult, check_heartbeats(Key, EmptyInfos, Ctx, EmptyPrev, #{}))
+% end}
+% ]
+% }.
+
+%{AggrRound, AggrPrecise, AggrState, LastTime} = LastAgrInfo
+%% EUnit fixture for focus_requests_rate/5: first call seeds state, second
+%% call yields smoothed per-leaf "_rate" values and a total_rate.
+focus_requests_rate_test_() ->
+ Ctx = #context{channel = <<"TEST">>, name = <<"TESTCOLLECTOR">>},
+ Key = {mpro_metrics, {focus, "shortid", "1234"}},
+ EmptyInfos = {[], undefined},
+ EmptyPrev = {undefined, undefined, undefined, undefined},
+ EmptyResult = {[], [], undefined},
+ Focus = #{smpp => #{esme => #{downstream => #{<<"call_input_01">> => 8333}}}},
+ FirstResultData = [#{
+ ckey => ["TEST","focus","dperl1@127.0.0.1",["shortid","1234"],"smpp"],
+ cvalue => #{esme => #{downstream => #{<<"call_input_01">> => 8333}}}
+ }],
+ FirstNewState = #{
+ lastValue => #{"smpp" => #{esme => #{downstream => #{<<"call_input_01">> => 8333}}}},
+ lastRate => #{}
+ },
+ FirstPrev = {undefined, undefined, #{}, undefined},
+ FirstResult = {FirstResultData, [], FirstNewState},
+ {FocusInfos, _} = dperl_status_pre:preprocess(Key, Focus, {}, 'dperl1@127.0.0.1', Ctx),
+ FocusResultData = [#{
+ ckey => ["TEST","focus","dperl1@127.0.0.1",["shortid","1234"],"smpp"],
+ cvalue => #{esme => #{downstream => #{<<"call_input_01">> => 8333,
+ <<"call_input_01_rate">> => 20}}, total_rate => 20}
+ }],
+ State = #{
+ lastValue => #{"smpp" => #{esme => #{downstream => #{<<"call_input_01">> => 8293}}}},
+ lastRate => #{"smpp" => #{esme => #{downstream => #{<<"call_input_01">> => 0}}}}
+ },
+ StartTime = {0, 0},
+ UpdatedTime1 = {2, 0}, % Test after 2 seconds.
+ Prev = {undefined, undefined, State, StartTime},
+ NewState = #{
+ lastValue => #{"smpp" => #{esme => #{downstream => #{<<"call_input_01">> => 8333}}}},
+ lastRate => #{"smpp" => #{esme => #{downstream => #{<<"call_input_01">> => 20.0}}}}
+ },
+ FocusResult = {FocusResultData, [], NewState},
+ {foreach,
+ fun() ->
+ ok
+ end,
+ fun(_) ->
+ ok
+ end,
+ [
+ {"Handles empty data",
+ fun() ->
+ ?assertEqual(EmptyResult, focus_requests_rate(Key, EmptyInfos, Ctx, EmptyPrev, #{}))
+ end},
+ {"Sets the state on first call",
+ fun() ->
+ ?assertEqual(FirstResult, focus_requests_rate(Key, {FocusInfos, StartTime}, Ctx, FirstPrev, #{}))
+ end},
+ {"Correctly calculate rates",
+ fun() ->
+ ?assertEqual(FocusResult, focus_requests_rate(Key, {FocusInfos, UpdatedTime1}, Ctx, Prev, #{}))
+ end}
+ ]
+ }.
+
+path_to_values_test_() ->
+ Input = #{
+ a => #{b => 5, d => #{ e => 6, f => 7 }},
+ c => 6
+ },
+ Result = [[a, b], [a, d, e], [a, d, f], [c]],
+ ?_assertEqual(Result, path_to_values(Input)).
+
+map_put_test_() ->
+ {foreach,
+ fun() ->
+ ok
+ end,
+ fun(_) ->
+ ok
+ end,
+ [
+ {"Set value on empty map",
+ fun() ->
+ Input = #{},
+ Path = [a, b, c],
+ Value = 3,
+ Result = #{a => #{b => #{c => 3}}},
+ ?assertEqual(Result, map_put(Path, Value, Input))
+ end},
+ {"Replace existing value",
+ fun() ->
+ Input = #{a => #{c => 3, d => 4}, b => 4},
+ Path1 = [a, c],
+ Path2 = [a, d],
+ Value = 5,
+ Result1 = #{a => #{c => 5, d => 4}, b => 4},
+ Result2 = #{a => #{c => 3, d => 5}, b => 4},
+ ?assertEqual(Result1, map_put(Path1, Value, Input)),
+ ?assertEqual(Result2, map_put(Path2, Value, Input))
+ end},
+ {"Replace existing branch by value",
+ fun() ->
+ Input = #{a => #{c => #{e => 1}, d => 4}, b => 3},
+ Path = [a, c],
+ Value = 5,
+ Result = #{a => #{c => 5, d => 4}, b => 3},
+ ?assertEqual(Result, map_put(Path, Value, Input))
+ end},
+ {"Replace existing value by new branch",
+ fun() ->
+ Input = #{a => #{c => 5, d => 4}, b => 3},
+ Path = [a, c, e],
+ Value = 1,
+ Result = #{a
=> #{c => #{e => 1}, d => 4}, b => 3},
+ ?assertEqual(Result, map_put(Path, Value, Input))
+ end}
+ ]
+ }.
+
+
+-endif.
diff --git a/src/dperl_status_pre.erl b/src/dperl_status_pre.erl
new file mode 100644
index 00000000..939ee47a
--- /dev/null
+++ b/src/dperl_status_pre.erl
@@ -0,0 +1,100 @@
+-module(dperl_status_pre).
+
+-include("dperl_status.hrl").
+
+-export([preprocess/5]).
+
+%% Formats a raw metric value into channel rows (#{ckey, cvalue} maps).
+%% Any crash while formatting is logged and yields no rows.
+preprocess(MetricKey, Value, Timestamp, Node, Ctx) ->
+ try preprocess_internal(MetricKey, Value, Timestamp, Node, Ctx)
+ catch
+ Error:Exception:Stacktrace ->
+ %% erlang:get_stacktrace/0 was deprecated in OTP 21 and removed in
+ %% OTP 24; capture the stacktrace in the catch pattern instead.
+ ?Error("Unable to format the Metric ~p : ~p, error ~p:~p ~p", [MetricKey, Value, Error, Exception, Stacktrace]),
+ {[], Ctx}
+ end.
+
+preprocess_internal(heartbeat, Value, _Timestamp, Node, #context{channel = Channel} = Ctx) ->
+ Metric = #{ckey => [binary_to_list(Channel), "system_info", atom_to_list(Node), "system_info", "heartbeat"],
+ cvalue => Value},
+ {[Metric], Ctx};
+preprocess_internal({imem_metrics, erlang_nodes}, NodeInfo, _Timestamp, Node, #context{channel = Channel} = Ctx) ->
+ Metric = #{ckey => [binary_to_list(Channel), "system_info", atom_to_list(Node), "system_info", "erlang_nodes"],
+ cvalue => NodeInfo},
+ {[Metric], Ctx};
+preprocess_internal({imem_metrics, data_nodes}, DNodeInfo, _Timestamp, Node, #context{channel = Channel} = Ctx) ->
+ Metric = #{ckey => [binary_to_list(Channel), "system_info", atom_to_list(Node), "system_info", "data_nodes"],
+ cvalue => DNodeInfo},
+ {[Metric], Ctx};
+preprocess_internal({imem_metrics, system_information}, NodeStatus, _Timestamp, Node, #context{channel = Channel} = Ctx) ->
+ Metric = #{ckey => [binary_to_list(Channel), "system_info", atom_to_list(Node), "system_info", "node_status"],
+ cvalue => NodeStatus},
+ {[Metric], Ctx};
+preprocess_internal({dperl_metrics, {job_down_count, _}}, JobDownCount, _Timestamp, Node, #context{channel = Channel} = Ctx) ->
+ Metric = #{ckey => [binary_to_list(Channel), "system_info", atom_to_list(Node), "system_info", "job_down_count"],
+
+ cvalue => #{job_down_count => JobDownCount}},
+ {[Metric], Ctx};
+preprocess_internal({dperl_metrics, {job_error_count, _}}, JobErrorCount, _Timestamp, Node, #context{channel = Channel} = Ctx) ->
+ Metric = #{ckey => [binary_to_list(Channel), "system_info", atom_to_list(Node), "system_info", "job_error_count"],
+ cvalue => #{job_error_count => JobErrorCount}},
+ {[Metric], Ctx};
+preprocess_internal({dperl_metrics, {jobs, _}}, JobInfos, _Timestamp, _Node, #context{channel = Channel} = Ctx) ->
+ Metrics = lists:foldl(
+ fun({Job, Value}, Acc) ->
+ Key = [binary_to_list(Channel), Job, "cluster", "jobs", Job],
+ [#{ckey => Key, cvalue => Value} | Acc]
+ end, [], JobInfos),
+ {Metrics, Ctx};
+preprocess_internal({dperl_metrics, {job_status, _}}, JobStatuses, _Timestamp, Node, #context{channel = Channel} = Ctx) ->
+ Metrics = lists:foldl(
+ fun({Job, Value}, Acc) ->
+ Key = [binary_to_list(Channel), Job, atom_to_list(Node), "job_status", Job],
+ [#{ckey => Key, cvalue => Value} | Acc]
+ end, [], JobStatuses),
+ {Metrics, Ctx};
+%% Error keys come in two shapes: [Job, Node] and [Job, Node, ErrorId];
+%% non-printable ErrorIds are rendered with ~p.
+preprocess_internal({dperl_metrics, {errors, _}}, JobStatuses, _Timestamp, _Node, #context{channel = Channel} = Ctx) ->
+ Metrics = lists:foldl(
+ fun({[Job, Node], Value}, Acc) ->
+ Key = [binary_to_list(Channel), Job, Node, "error", Job],
+ [#{ckey => Key, cvalue => Value} | Acc];
+ ({[Job, Node, ErrorId], Value}, Acc) ->
+ ErrorId1 = case io_lib:printable_list(ErrorId) of
+ true -> ErrorId;
+ false -> format(ErrorId)
+ end,
+ Key = [binary_to_list(Channel), Job, Node, "error", ErrorId1],
+ [#{ckey => Key, cvalue => Value} | Acc]
+ end, [], JobStatuses),
+ {Metrics, Ctx};
+preprocess_internal({dperl_metrics, {focus,_,_}}, OrigValues, _Timestamp, _Node, #context{channel = Channel} = Ctx) ->
+ FocusMetrics =
+ [F#{ckey => [binary_to_list(Channel) | CKey]} ||
+  #{ckey := CKey} = F <- OrigValues],
+ {FocusMetrics, Ctx};
+preprocess_internal({dperl_metrics, {agr, AgrMetric, _, _}}, AgrData, _Timestamp, _Node, #context{} = Ctx) ->
+ Metric = #{ckey => [AgrMetric], cvalue => AgrData},
+ {[Metric], Ctx};
+%% mpro Metrics
+preprocess_internal({mpro_metrics, run_levels}, RunLevels, _Timestamp, Node, #context{channel = Channel} = Ctx) ->
+ {maps:fold(
+ fun(Protocol, RunLevel, Acc) ->
+ Metric = #{ckey => [binary_to_list(Channel), atom_to_list(Protocol), atom_to_list(Node), "run_level", ""],
+ cvalue => #{run_level => RunLevel}},
+ [Metric | Acc]
+ end, [], RunLevels), Ctx};
+preprocess_internal({mpro_metrics, {focus, Topic, FocusKey}}, FocusMetrics, _Timestamp, Node, #context{channel = Channel} = Ctx) ->
+ {maps:fold(
+ fun(Protocol, FocusMetric, Acc) ->
+ Metric = #{ckey => [binary_to_list(Channel), "focus", atom_to_list(Node), [Topic, FocusKey], atom_to_list(Protocol)],
+ cvalue => FocusMetric},
+ [Metric | Acc]
+ end, [], FocusMetrics), Ctx};
+preprocess_internal({mpro_metrics, {MetricKey, Protocol}}, Value, _Timestamp, Node, #context{channel = Channel} = Ctx) ->
+ Metric = #{ckey => [binary_to_list(Channel), atom_to_list(Protocol), atom_to_list(Node), atom_to_list(MetricKey), ""],
+ cvalue => #{MetricKey => Value}},
+ {[Metric], Ctx};
+%% Catch-all clause for any other {Module, MetricKey} metric.
+preprocess_internal({MetricMod, MetricKey}, Value, _Timestamp, Node, #context{channel = Channel} = Ctx) ->
+ Metric = #{ckey => [binary_to_list(Channel), atom_to_list(MetricMod), atom_to_list(Node), format(MetricKey), ""],
+ cvalue => Value},
+ {[Metric], Ctx}.
+
+format(Key) ->
+ lists:flatten(io_lib:format("~p", [Key])).
diff --git a/src/dperl_status_pull.erl b/src/dperl_status_pull.erl
new file mode 100644
index 00000000..eca12c91
--- /dev/null
+++ b/src/dperl_status_pull.erl
@@ -0,0 +1,517 @@
+-module(dperl_status_pull).
+
+-include("dperl_status.hrl").
+
+-behavior(dperl_worker).
+-behavior(dperl_strategy_scr).
+
+% dperl_worker exports
+-export([init/1, terminate/2, handle_call/3, handle_cast/2, handle_info/2,
+ code_change/3, format_status/2, get_status/1, init_state/1]).
+
+% dperl_strategy_scr export
+-export([connect_check_src/1, get_source_events/2, connect_check_dst/1,
+         do_cleanup/2, do_refresh/2, fetch_src/2, fetch_dst/2, delete_dst/2,
+         insert_dst/3, update_dst/3, report_status/3]).
+
+% Converting pid from string to local pid, required for rpc registration checks.
+-export([is_process_alive/1]).
+
+%% Verify (and if needed re-establish) the imem session to the remote source.
+%% An empty connection map means a purely local job: nothing to check.
+%% Otherwise a cheap `schema` round trip probes the cached session; on any
+%% other result the old session is closed (best effort, hence the catch) and
+%% a fresh link is attempted, rotating `active_link` on failure.
+connect_check_src(#context{connection = Conn} = Ctx) when map_size(Conn) == 0 -> {ok, Ctx};
+connect_check_src(#context{connection = #{credential := #{user := User, password := Password}, links := Links},
+                           active_link = ActiveLink, imem_sess = OldSession} = Ctx) ->
+    #{schema := Schema} = lists:nth(ActiveLink, Links),
+    case catch OldSession:run_cmd(schema, []) of
+        Schema -> {ok, Ctx};
+        _ ->
+            catch OldSession:close(),
+            case dperl_dal:connect_imem_link(ActiveLink, Links, User, Password) of
+                {ok, Session, _Pid} -> {ok, Ctx#context{imem_sess = Session}};
+                {Error, NewActiveLink} ->
+                    ?JError("Error connecting to remote : ~p", [Error]),
+                    %% target_node is reset too: metrics must re-discover the remote node
+                    {error, Error, Ctx#context{active_link = NewActiveLink, imem_sess = undefined, target_node = undefined}}
+            end
+    end.
+
+%% Destination is the local skvh channel; always considered connected.
+connect_check_dst(Ctx) -> {ok, Ctx}.
+
+%% Cleanup pass: drop dead focus registrations first, then purge stale rows
+%% from both the main and the aggregate channels. Single-shot ({ok, finish, _}).
+do_cleanup(#context{channel = Channel, aggr_channel = AggrChannel,
+                    name = JobName, stale_time = StaleTime} = State, _BulkSize) ->
+    FocusReg = clean_stale_registrations(Channel, JobName),
+    clean_stale_rows(Channel, FocusReg, StaleTime, State#context.debug),
+    clean_stale_rows(AggrChannel, FocusReg, StaleTime, State#context.debug),
+    {ok, finish, State}.
+
+%% This worker only implements the sync phase; refresh must never be invoked.
+do_refresh(_, _) -> error(sync_only_job).
+
+%% Drive one metric-request round. The `is_loaded` flag alternates phases:
+%% after a request round it is true, and the next call just completes the
+%% sync cycle without issuing new requests (responses arrive via handle_info).
+get_source_events(#context{is_loaded = true} = Ctx, _BulkSize) ->
+    {ok, sync_complete, Ctx#context{ is_loaded = false }};
+get_source_events(#context{metrics = Metrics, focus = Focus, channel = Channel, requested_metrics = NotResponded,
+                           sync_cnt = SyncCnt, imem_sess = Session} = Ctx, _BulkSize) ->
+    if Ctx#context.debug == none -> no_op;
+       true ->
+            ?JInfo("Metrics requested : ~p Metrics received : ~p", [Ctx#context.requested_cnt, Ctx#context.received_cnt])
+    end,
+    {RequestedMetrics, RquestedCnt} = request_metrics(Metrics, Ctx, Session, SyncCnt, NotResponded),
+    %% Focus metrics are only requested for currently registered focus subscribers.
+    {NewReqMetrics, FocusRequestCnt} =
+        case maps:size(Focus) of
+            0 -> {RequestedMetrics, 0};
+            _ ->
+                Registrations = get_focus_registrations(Channel),
+                request_focus_metrics(Registrations, Focus, Session, SyncCnt, RequestedMetrics)
+        end,
+    % on error in jobDyn table for jobs with only sync enabled, error status is not
+    % cleared as the state is empty issue #476
+    if
+        Ctx#context.sync_only ->
+            dperl_dal:update_job_dyn(list_to_binary(Ctx#context.name), idle);
+        true -> no_op
+    end,
+    {ok, sync_complete, Ctx#context{requested_cnt = RquestedCnt + FocusRequestCnt, received_cnt = 0,
+                                    sync_cnt = SyncCnt + 1, requested_metrics = NewReqMetrics, is_loaded = true}}.
+
+%% Data flows exclusively through handle_info metric responses, so all of
+%% the per-key scr callbacks below are deliberate no-ops.
+fetch_src(_Key, _State) -> ?NOT_FOUND.
+
+fetch_dst(_Key, _State) -> ?NOT_FOUND.
+
+delete_dst(_Key, State) -> {false, State}.
+
+insert_dst(_Key, _Val, State) -> {false, State}.
+
+update_dst(_Key, _Val, State) -> {false, State}.
+
+report_status(_Key, _, _State) -> no_op.
+
+get_status(_Ctx) -> #{}.
+
+init_state(_Ignored) -> #context{}.
+
+%% Initialize the status-pull worker from its #dperlJob{} config: set up the
+%% main and aggregate skvh channels, validate aggregator funs, wipe leftover
+%% rows from a previous incarnation of this job on this node, and start the
+%% heartbeat loop. Refuses to start if any aggregator fun is not whitelisted.
+init({#dperlJob{name=NameBin, dstArgs=#{channel := Channel}, args=Args, srcArgs=SrcArgs}, Ctx}) ->
+    ChannelBin = list_to_binary(Channel),
+    %% NOTE(review): this line was garbled to `<>` in the patch (the binary
+    %% interior was stripped); reconstructed as the "Aggr" side channel used
+    %% for precise aggregates — confirm suffix against the original source.
+    AggrChannel = <<ChannelBin/binary, "Aggr">>,
+    Metrics = add_channel_to_agr_metrics(maps:get(metrics, SrcArgs, []), ChannelBin, []),
+    Focus = maps:get(focus, SrcArgs, #{}),
+    HeartBeatInterval = maps:get(heartBeatInterval, Args, 10000),
+    Aggregators = fetch_aggregators(Metrics, #{focus => fetch_aggregators(Focus)}),
+    TableOpts = maps:get(tableOpts, Args, []),
+    NodeCollector = maps:get(node_collector, SrcArgs, undefined),
+    Connection = sort_links(maps:get(connection, SrcArgs, #{})),
+    Name = binary_to_list(NameBin),
+    Debug = maps:get(debug, Args, none),
+    %% sync_only: neither cleanup nor refresh configured -> pure sync job
+    SyncOnly = case Args of
+                   #{cleanup := false, refresh := false} -> true;
+                   _ -> false
+               end,
+    case is_safe_funs(lists:flatten(maps:values(Aggregators))) of
+        true ->
+            StaleTime = ?STALE_TIME(?MODULE, NameBin),
+            %% per-job ETS cache of aggregator state, keyed on #dperl_metrics.key
+            ets:new(?METRICS_TAB(Name), [public, named_table, {keypos,2}]),
+            imem_dal_skvh:create_check_channel(ChannelBin, TableOpts),
+            imem_dal_skvh:create_check_channel(AggrChannel, [{type, map}]),
+            imem_snap:exclude_table_pattern(Channel),
+            ?JInfo("Cleaning up before start"),
+            NodeList = atom_to_list(node()),
+            dperl_dal:remove_deep(ChannelBin, [Name, NodeList]),
+            dperl_dal:remove_deep(AggrChannel, [Name, NodeList]),
+            ?JInfo("Starting"),
+            self() ! heartbeat,
+            {ok, Ctx#context{name=Name, channel = ChannelBin, metrics = Metrics, focus = Focus,
+                             heart_interval = HeartBeatInterval, aggregators = Aggregators,
+                             node_collector = NodeCollector, aggr_channel = AggrChannel,
+                             stale_time = StaleTime, debug = Debug, connection = Connection,
+                             sync_only = SyncOnly}};
+        false ->
+            ?JError("One or more of the aggregator funs are not safe to be executed"),
+            {stop, not_safe_aggr_funs}
+    end;
+init({Args, _}) ->
+    ?JError("bad start parameters ~p", [Args]),
+    {stop, badarg}.
+
+handle_call(Request, _From, Ctx) ->
+    ?JWarn("Unsupported handle_call ~p", [Request]),
+    {reply, ok, Ctx}.
+
+handle_cast(Request, Ctx) ->
+    ?JWarn("Unsupported handle_cast ~p", [Request]),
+    {noreply, Ctx}.
+
+%% Metric response: drop the pending-request entry (if tracked) before
+%% processing so the wait-threshold check never fires for answered requests.
+handle_info({metric, ReqRef, _, _, _} = Response, #context{requested_metrics = RMs} = Ctx) ->
+    NewCtx =
+        case maps:get(ReqRef, RMs, none) of
+            none -> process_metric(Response, Ctx);
+            _ -> process_metric(Response, Ctx#context{requested_metrics = maps:remove(ReqRef, RMs)})
+        end,
+    {noreply, NewCtx};
+%% No target node discovered yet: just keep the heartbeat timer alive.
+handle_info(heartbeat, #context{target_node = undefined, heart_interval = HInt} = Ctx) ->
+    erlang:send_after(HInt, self(), heartbeat),
+    {noreply, Ctx};
+%% Heartbeat tick: publish counters accumulated since the last tick to both
+%% channels, expire requests that never got a response, then reset counters.
+handle_info(heartbeat, #context{channel = Channel, name = Name, heart_interval = HInt,
+                                aggr_channel = AggrChannel, target_node = TNode,
+                                requested_metrics = RequestedMetrics} = Ctx) ->
+    #context{
+        success_cnt = SuccessCnt,
+        mem_overload_cnt = MemOverloadCnt,
+        cpu_overload_cnt = CpuOverloadCnt,
+        eval_suspend_cnt = EvalCrashCnt,
+        errors = Errors
+    } = Ctx,
+    Value = #{
+        time => erlang:system_time(milli_seconds),
+        success_count => SuccessCnt,
+        memory_overload_count => MemOverloadCnt,
+        cpu_overload_count => CpuOverloadCnt,
+        eval_crash_count => EvalCrashCnt,
+        errors => Errors
+    },
+    {HeartbeatInfo, NewCtx} = dperl_status_pre:preprocess(heartbeat, Value, imem_meta:time(), TNode, Ctx),
+    provision(Name, AggrChannel, HeartbeatInfo, false), %% writing to aggr table
+    provision(Name, Channel, HeartbeatInfo, true),
+    RMetrics = filter_not_responded_metrics(Name, RequestedMetrics),
+    erlang:send_after(HInt, self(), heartbeat),
+    {noreply, NewCtx#context{success_cnt = 0, mem_overload_cnt = 0, errors = [],
+                             cpu_overload_cnt = 0, eval_suspend_cnt = 0,
+                             requested_metrics = RMetrics}};
+handle_info(Request, Ctx) ->
+    ?JWarn("handle_info ~p", [Request]),
+    {noreply, Ctx}.
+
+%% Remove this job's rows from both channels on shutdown (mirrors init cleanup).
+terminate(Reason, #context{channel=Channel, name=Name, aggr_channel = AggrChannel}) ->
+    NodeList = atom_to_list(node()),
+    dperl_dal:remove_deep(Channel, [Name, NodeList]),
+    dperl_dal:remove_deep(AggrChannel, [Name, NodeList]),
+    ?JInfo("terminate ~p", [Reason]).
+
+code_change(OldVsn, Ctx, Extra) ->
+    ?JInfo("code_change ~p:~n~p", [OldVsn, Extra]),
+    {ok, Ctx}.
+
+format_status(Opt, [PDict, Ctx]) ->
+    ?JInfo("format_status ~p:~n~p", [Opt, PDict]),
+    Ctx.
+
+%% Handle one metric response. Error/overload variants only bump counters
+%% (published on the next heartbeat); the final clause is the success path.
+process_metric({metric, ReqRef, _Timestamp, _Node, {error, user_input}}, #context{} = Ctx) ->
+    ?JError("User Input error when requesting metric ~p", [ReqRef]),
+    Ctx;
+process_metric({metric, ReqRef, _Timestamp, Node, {error, Error}}, #context{errors = Errors, target_node = TNode} = Ctx) ->
+    ?JError("Error ~p when requesting metric ~p", [Error, ReqRef]),
+    MetricBin = imem_datatype:term_to_io(ReqRef),
+    ErrorBin = imem_datatype:term_to_io(Error),
+    Ctx#context{errors = [#{metricKey => MetricBin, error => ErrorBin} | Errors],
+                target_node = get_target_node(Node, TNode)};
+process_metric({metric, ReqRef, _Timestamp, Node, cpu_overload}, #context{cpu_overload_cnt = CpuOverloadCnt, target_node = TNode} = Ctx) ->
+    ?JWarn("Unable to get metric ~p as server is on cpu overload", [ReqRef]),
+    Ctx#context{cpu_overload_cnt = CpuOverloadCnt + 1, target_node = get_target_node(Node, TNode)};
+process_metric({metric, ReqRef, _Timestamp, Node, memory_overload}, #context{mem_overload_cnt = MemOverloadCnt, target_node = TNode} = Ctx) ->
+    ?JWarn("Unable to get metric ~p as server is on memory overload", [ReqRef]),
+    Ctx#context{mem_overload_cnt = MemOverloadCnt + 1, target_node = get_target_node(Node, TNode)};
+process_metric({metric, ReqRef, _Timestamp, Node, eval_crash_suspend}, #context{eval_suspend_cnt = EvalCrashCnt,
+                                                                                target_node = TNode} = Ctx) ->
+    ?JWarn("Unable to get metric ~p as server is on eval crash suspend state", [ReqRef]),
+    Ctx#context{eval_suspend_cnt = EvalCrashCnt + 1, target_node = get_target_node(Node, TNode)};
+%% Success: preprocess, run the configured aggregators, then write rounded
+%% aggregates to the main channel (JSON) and precise ones to the aggr channel.
+process_metric({metric, MetricReqKey, Timestamp, Node, OrigMetrics}, #context{channel = Channel, name = Name,
+                received_cnt = ReceivedCnt, success_cnt = SuccessCnt, aggregators = Aggrs,
+                aggr_channel = AggrChannel, target_node = TNode} = Ctx) ->
+    %% Append the channel to the key...
+    {Metrics, PreCtx} = dperl_status_pre:preprocess(MetricReqKey, OrigMetrics, Timestamp, Node, Ctx),
+    {AggrMetrics, AgrPrecise} = process_aggregators(Aggrs, PreCtx, MetricReqKey, Metrics, Timestamp),
+    Writes = provision(Name, Channel, AggrMetrics, true),
+    provision(Name, AggrChannel, AgrPrecise, false), %% writing to aggr table
+    if Ctx#context.debug == detail ->
+            ?JInfo("Metric : ~p received : ~p written : ~p", [MetricReqKey, length(Metrics), Writes]);
+       true -> no_op
+    end,
+    PreCtx#context{received_cnt = ReceivedCnt + 1, success_cnt = SuccessCnt + 1,
+                   target_node = get_target_node(Node, TNode)}.
+
+%% Select the aggregator list: focus metrics share one list under the `focus`
+%% key; all other metrics look up their own metric key.
+process_aggregators(Aggregators, Ctx, {_, {focus, _, _}} = FocusKey, Metrics, Time) ->
+    FocusAgggregators = maps:get(focus, Aggregators, []),
+    process_aggregators(FocusAgggregators, Ctx, FocusKey, Metrics, Time, {[], []});
+process_aggregators(Aggregators, Ctx, MetricKey, Metrics, Time) ->
+    MetricAggregators = maps:get(MetricKey, Aggregators, []),
+    process_aggregators(MetricAggregators, Ctx, MetricKey, Metrics, Time, {[], []}).
+
+%% Run each aggregator fun against the fresh metrics, diffing its output
+%% against the previous round cached in the per-job ETS table so keys that
+%% disappeared are removed from the channels. Aggregator crashes are caught
+%% and logged; that aggregator's previous output is kept for the round.
+process_aggregators([], _Ctx, _MetricKey, _Metrics, _Time, Acc) -> Acc;
+process_aggregators([{AgrMod, AgrFun, Opts} | RestAggregators], #context{name = Name, aggr_channel = AggrChannel, channel = Channel} = Ctx,
+                    MetricKey, Metrics, Time, {AggRAcc, AggPAcc} = Acc) ->
+    EtsKey = {MetricKey, AgrMod, AgrFun},
+    %{AggrRound, AggrPrecise, AggrState, LastTime} = LastAgrInfo
+    {LastAggRound, LastAggPrecise, _, _} = LastAgrInfo =
+        case ets:lookup(?METRICS_TAB(Name), EtsKey) of
+            [#dperl_metrics{agg_round = AR, agg_precise = AP, state = AS, time = LTime}] ->
+                {AR, AP, AS, LTime};
+            [] -> {[], [], #{}, undefined}
+        end,
+    NewAcc =
+        case catch erlang:apply(AgrMod, AgrFun, [MetricKey, {Metrics, Time}, Ctx, LastAgrInfo, Opts]) of
+            {AggMetrics, AggPrecise, AggState} when is_list(AggMetrics) ->
+                remove_old_keys(Name, Channel, AggMetrics, LastAggRound), %% removing from the main table
+                remove_old_keys(Name, AggrChannel, AggPrecise, LastAggPrecise), %% removing from the aggr table
+                ets:insert(?METRICS_TAB(Name), #dperl_metrics{key = EtsKey,
+                                                              agg_round = AggMetrics,
+                                                              agg_precise = AggPrecise,
+                                                              state = AggState,
+                                                              time = Time}),
+                {AggRAcc ++ AggMetrics, AggPAcc ++ AggPrecise};
+            Error ->
+                ?JError("Aggregation error for ~p:~p Error : ~p", [AgrMod, AgrFun, Error]),
+                Acc
+        end,
+    process_aggregators(RestAggregators, Ctx, MetricKey, Metrics, Time, NewAcc).
+
+%% Write metric maps to a channel under [JobName, Node | Key], counting only
+%% rows that actually changed (write_if_different returns no_op otherwise).
+provision(Name, Channel, Infos, ShouldEncode) ->
+    provision(Name, Channel, Infos, ShouldEncode, 0).
+
+provision(_Name, _Channel, [], _ShouldEncode, Writes) -> Writes;
+provision(Name, Channel, [#{ckey := OrigKey, cvalue := Value} | Infos], ShouldEncode, Writes) ->
+    Key = [Name, atom_to_list(node()) | OrigKey],
+    case dperl_dal:write_if_different(Channel, Key, ensure_json_encoded(Value, ShouldEncode)) of
+        no_op -> provision(Name, Channel, Infos, ShouldEncode, Writes);
+        _ -> provision(Name, Channel, Infos, ShouldEncode, Writes + 1)
+    end.
+
+%% Encode a value as JSON for the main channel; pass through when encoding is
+%% disabled or the value is already a binary.
+ensure_json_encoded(Value, false) -> Value;
+ensure_json_encoded(Value, true) when is_binary(Value) -> Value;
+ensure_json_encoded(Value, true) -> imem_json:encode(Value).
+
+%%TODO: This should take into account the already requested metrics with no response yet.
+%% Issue one request per configured metric; `agr` metrics with an empty
+%% channel get the job's own channel injected. The accumulator threads the
+%% pending-request map plus a count of successfully issued requests.
+request_metrics(Metrics, Context, Session, SyncCnt, NotResponded) when is_map(NotResponded)->
+    request_metrics(Metrics, Context, Session, SyncCnt, {NotResponded, 0});
+request_metrics([], _Context, _Session, _SyncCnt, Acc) -> Acc;
+request_metrics([#{key := {agr, AgrMetric, []}} = Metric | Rest], Context, Session, SyncCnt, Acc) ->
+    MetricKey = {agr, AgrMetric, Context#context.channel},
+    request_metrics([Metric#{key => MetricKey} | Rest], Context, Session, SyncCnt, Acc);
+request_metrics([Metric | Rest], Context, Session, SyncCnt, {RMetrics, _} = Acc) ->
+    ReqResult = request_metric(Metric, Session, SyncCnt, RMetrics),
+    request_metrics(Rest, Context, Session, SyncCnt, acc_if_ok(ReqResult, Acc)).
+
+%% NOTE: spec corrected — the success path returns {ok, {Mod, MetricKey}}
+%% from request_metric/2, never the bare atom `ok`.
+%% (`posponed` [sic] is an existing runtime atom and is kept as-is.)
+-spec request_metric(map(), term(), integer(), map()) -> {ok, term()} | error | posponed.
+request_metric(#{key := MetricKey, metric_src := Mod} = Metric, Session, SyncCnt, RMetrics) ->
+    Frequency = maps:get(frequency, Metric, 1),
+    case is_time_to_run(Frequency, {Mod, MetricKey}, SyncCnt, RMetrics) of
+        true -> request_metric(Metric, Session);
+        false -> posponed
+    end;
+request_metric(Metric, _Session, _SyncCnt, _RMetrics) ->
+    ?JError("Metrics not in the correct format : ~p", [Metric]),
+    error.
+
+%% NOTE: spec corrected — both success clauses return {ok, {Mod, MetricKey}}.
+-spec request_metric(map(), term()) -> {ok, term()} | error.
+%% Fire one asynchronous metric request, locally via imem_gen_metrics or on
+%% the remote session; the {Mod, MetricKey} reply reference is returned so
+%% the caller can track it in requested_metrics.
+request_metric(#{location := local, metric_src := Mod, key := MetricKey}, _Session) ->
+    ok = imem_gen_metrics:request_metric(Mod, MetricKey, {Mod, MetricKey}, self()),
+    {ok, {Mod, MetricKey}};
+request_metric(#{location := remote, metric_src := Mod, key := MetricKey}, Session) ->
+    ok = Session:run_cmd(request_metric, [Mod, MetricKey, {Mod, MetricKey}]),
+    {ok, {Mod, MetricKey}};
+request_metric(Metric, _Session) ->
+    ?JError("Metrics not in the correct format : ~p", [Metric]),
+    error.
+
+%% Record a successfully issued request (with its request time) and bump the
+%% issued counter; any other result leaves the accumulator untouched.
+-spec acc_if_ok(term(), tuple()) -> tuple().
+acc_if_ok({ok, Ref}, {RMetrics, Count}) ->
+    {RMetrics#{Ref => imem_meta:time()}, Count + 1};
+acc_if_ok(_, Acc) -> Acc.
+
+%% All current focus subscriptions live under ["register", "focus"].
+get_focus_registrations(Channel) ->
+    BaseKey = ["register", "focus"],
+    imem_dal_skvh:read_shallow(system, Channel, [BaseKey]).
+
+%% For every registration row, request the focus metrics whose topic is
+%% configured for this job (cvalue is a JSON list of [Topic, Key] pairs).
+request_focus_metrics(Registrations, Focus, Session, SyncCnt, RequestedMetrics) when is_map(RequestedMetrics) ->
+    request_focus_metrics(Registrations, Focus, Session, SyncCnt, {RequestedMetrics, 0});
+request_focus_metrics([], _Focus, _Session, _SyncCnt, Acc) -> Acc;
+request_focus_metrics([#{cvalue := CValue} | Registrations], Focus, Session, SyncCnt, Acc) ->
+    KeyTopicsList = imem_json:decode(CValue),
+    NewAcc = request_focus_metrics_internal(KeyTopicsList, Focus, Session, SyncCnt, Acc),
+    request_focus_metrics(Registrations, Focus, Session, SyncCnt, NewAcc).
+
+-spec request_focus_metrics_internal(list(), map(), term(), integer(), tuple()) -> tuple().
+%% Request one focus metric per [Topic, Key] pair whose topic this job serves;
+%% unknown topics are skipped silently.
+request_focus_metrics_internal([], _, _, _, Acc) -> Acc;
+request_focus_metrics_internal([[Topic, Key] | Rest], Focus, Session, SyncCnt, {RMetrics, _} = Acc) ->
+    case maps:is_key(Topic, Focus) of
+        true ->
+            Metric = maps:get(Topic, Focus),
+            MetricKey = {focus, binary_to_list(Topic), binary_to_list(Key)},
+            ReqResult = request_metric(Metric#{key => MetricKey}, Session, SyncCnt, RMetrics),
+            request_focus_metrics_internal(Rest, Focus, Session, SyncCnt, acc_if_ok(ReqResult, Acc));
+        false ->
+            request_focus_metrics_internal(Rest, Focus, Session, SyncCnt, Acc)
+    end.
+
+%% Walk the whole channel table inside one transaction and delete rows whose
+%% owning node's heartbeat is stale (or whose focus registration is gone).
+clean_stale_rows(Channel, FocusReg, StaleTime, Debug) ->
+    TableName = imem_dal_skvh:atom_table_name(Channel),
+    CleanFun = fun() ->
+        FirstKey = imem_meta:first(TableName),
+        clean_stale_rows(TableName, FirstKey, StaleTime, #{}, FocusReg, Debug, 0)
+    end,
+    case imem_meta:transaction(CleanFun) of
+        {atomic, ok} -> ok;
+        ErrorResult ->
+            ?JError("Error cleaning stale rows, result: ~p", [ErrorResult])
+    end.
+
+%% Per-row traversal. `Heartbeats` caches each [Cid, Node, Platform] group's
+%% verdict (current | stale) so the heartbeat row is fetched at most once.
+clean_stale_rows(_TableName, '$end_of_table', _StaleTime, _Heartbeats, _FocusReg, _Debug, 0) -> ok;
+clean_stale_rows(_TableName, '$end_of_table', _StaleTime, _Heartbeats, _FocusReg, none, _Count) -> ok;
+clean_stale_rows(_TableName, '$end_of_table', _StaleTime, _Heartbeats, _FocusReg, _Debug, Count) ->
+    ?JInfo("clean up deleted ~p rows", [Count]);
+clean_stale_rows(TableName, NextKey, StaleTime, Heartbeats, FocusReg, Debug, Count) ->
+    [RawRow] = imem_meta:read(TableName, NextKey),
+    #{ckey := CKey} = imem_dal_skvh:skvh_rec_to_map(RawRow),
+    {NHeartMap, NCount} = case CKey of
+        %% focus rows: dropped immediately when their registration is gone,
+        %% otherwise subject to the normal heartbeat staleness check
+        [Cid, Node, Platform, "focus", _TNode, FocusId, _FKey] ->
+            case lists:member(FocusId, FocusReg) of
+                true ->
+                    HBKey = [Cid, Node, Platform, "system_info"],
+                    clean_stale_row(maps:get(HBKey, Heartbeats, undefined), Heartbeats,
+                                    Count, TableName, RawRow, HBKey, StaleTime);
+                false ->
+                    imem_meta:remove(TableName, RawRow),
+                    {Heartbeats, Count + 1}
+            end;
+        [Cid, Node, Platform, _Group | _Rest] ->
+            HBKey = [Cid, Node, Platform, "system_info"],
+            clean_stale_row(maps:get(HBKey, Heartbeats, undefined), Heartbeats,
+                            Count, TableName, RawRow, HBKey, StaleTime);
+        _ -> {Heartbeats, Count}
+    end,
+    NKey = imem_meta:next(TableName, NextKey),
+    clean_stale_rows(TableName, NKey, StaleTime, NHeartMap, FocusReg, Debug, NCount).
+
+%% Decide one row's fate from the cached heartbeat verdict; on a cache miss
+%% fetch the heartbeat row, compare its timestamp against StaleTime and cache
+%% the verdict for the rest of the traversal.
+clean_stale_row(current, Heartbeats, Count, _TableName, _RawRow, _HBKey, _StaleTime) -> {Heartbeats, Count};
+clean_stale_row(stale, Heartbeats, Count, TableName, RawRow, _HBKey, _StaleTime) ->
+    imem_meta:remove(TableName, RawRow),
+    {Heartbeats, Count + 1};
+clean_stale_row(undefined, Heartbeats, Count, TableName, RawRow, HBKey, StaleTime) ->
+    case fetch_heartbeat(TableName, HBKey) of
+        not_found ->
+            imem_meta:remove(TableName, RawRow),
+            {Heartbeats#{HBKey => stale}, Count + 1};
+        CValue ->
+            T = extract_time(CValue),
+            ElapsedTime = erlang:system_time(milli_seconds) - T,
+            case ElapsedTime > StaleTime of
+                true ->
+                    imem_meta:remove(TableName, RawRow),
+                    {Heartbeats#{HBKey => stale}, Count + 1};
+                false ->
+                    {Heartbeats#{HBKey => current}, Count}
+            end
+    end.
+
+%% Drop registrations whose owner process is dead; returns the deduplicated
+%% [Topic, Key] pairs of the registrations that survived.
+clean_stale_registrations(Channel, JobName) ->
+    BaseKey = ["register", "focus"],
+    lists:usort(clean_stale_registrations(Channel, JobName, imem_dal_skvh:read_shallow(system, Channel, [BaseKey]), [])).
+
+%% TODO: Find better name as this cleans the registrations and returns the list
+%% of topics for later data cleanup, maybe name should be more descriptive.
+clean_stale_registrations(_Channel, _JobName, [], Acc) -> Acc;
+%% A registration key is ["register", "focus", [Node, Pid]]; the owner is
+%% checked via rpc on its node. badrpc (node gone) is treated like a dead
+%% owner: the registration cannot be verified, so it is removed.
+clean_stale_registrations(Channel, JobName, [#{ckey := ["register", "focus", [NodeString, PidString]], cvalue := CValue} = Reg | Rest], Acc) ->
+    Node = list_to_existing_atom(NodeString),
+    case rpc:call(Node, dperl_status_pull, is_process_alive, [PidString]) of
+        true ->
+            KeyTopicsList = [[binary_to_list(Topic), binary_to_list(Key)] || [Topic, Key] <- imem_json:decode(CValue)],
+            clean_stale_registrations(Channel, JobName, Rest, KeyTopicsList ++ Acc);
+        false ->
+            remove_registration(Channel, Reg, JobName),
+            clean_stale_registrations(Channel, JobName, Rest, Acc);
+        {badrpc, _} ->
+            %% Unable to check the registration, removing invalid data.
+            remove_registration(Channel, Reg, JobName),
+            clean_stale_registrations(Channel, JobName, Rest, Acc)
+    end.
+
+%% Delete a registration row and the cached aggregator state of its focus
+%% metric from the job's ETS table.
+remove_registration(Channel, Reg, JobName) ->
+    imem_dal_skvh:remove(system, Channel, Reg),
+    %removing focus ets entry
+    #{cvalue := FocusVal} = Reg,
+    case imem_json:decode(FocusVal) of
+        [FocusId | _] ->
+            MetricKey = {dperl_metrics, list_to_tuple([focus | dperl_dal:key_from_json(FocusId)])},
+            EtsFocusKey = {MetricKey, dperl_status_agr, write_to_channel},
+            ets:delete(?METRICS_TAB(JobName), EtsFocusKey);
+        _ -> no_op
+    end.
+
+%% Exported for the rpc:call above; pid arrives as a string from the ckey.
+is_process_alive(PidString) ->
+    Pid = list_to_pid(PidString),
+    erlang:is_process_alive(Pid).
+
+%% A metric is due when it is not already pending and its frequency slot
+%% (phash2-spread over sync cycles) matches the current cycle.
+is_time_to_run(Frequency, Key, SyncCnt, RMetrics) ->
+    case RMetrics of
+        #{Key := _} -> false;
+        _ ->
+            Frequency == 1 orelse (SyncCnt rem Frequency == erlang:phash2(Key, 1023) rem Frequency)
+    end.
+
+%% Normalize the connection config by sorting its links (no-op when empty).
+sort_links(Connection) when map_size(Connection) == 0 -> #{};
+sort_links(#{links := Links} = Connection) ->
+    Connection#{links => dperl_dal:sort_links(Links)}.
+
+%% All aggregator funs must be arity-5 and whitelisted by imem_compiler:safe/1.
+-spec is_safe_funs(list()) -> boolean().
+is_safe_funs([]) -> true;
+is_safe_funs([{Mod, Fun, _Opts} | ModFuns]) ->
+    lists:member({Mod, Fun, 5}, imem_compiler:safe(Mod)) andalso is_safe_funs(ModFuns).
+
+%% Remove channel rows for aggregate keys that existed in the previous round
+%% but are absent from the new one.
+remove_old_keys(Name, Channel, NewKeyVals, OldKeyVals) ->
+    NewKeys = [Key || #{ckey := Key} <- NewKeyVals],
+    OldKeys = [Key || #{ckey := Key} <- OldKeyVals],
+    %% Removing old keys that are not valid any more
+    [dperl_dal:remove_from_channel(Channel, [Name, atom_to_list(node()) | Key]) || Key <- OldKeys -- NewKeys].
+
+%% Track the remote node metrics are served from; once known, a response from
+%% the local node does not overwrite it.
+-spec get_target_node(atom(), atom()) -> atom().
+get_target_node(MetricNode, undefined) -> MetricNode;
+get_target_node(MetricNode, TargetNode) when node() =:= MetricNode -> TargetNode;
+get_target_node(MetricNode, _TargetNode) -> MetricNode.
+
+%% Pull the heartbeat timestamp out of a raw (JSON binary) or decoded map row.
+%% NOTE: spec corrected — both clauses return the `time` value (milliseconds
+%% written via erlang:system_time(milli_seconds)), not a map.
+-spec extract_time(map() | binary()) -> integer().
+extract_time(Value) when is_binary(Value) ->
+    #{<<"time">> := T} = imem_json:decode(Value, [return_maps]),
+    T;
+extract_time(#{time := T}) -> T.
+
+%% Build the {Mod, MetricKey} => [{AgrMod, AgrFun, Opts}] lookup map.
+-spec fetch_aggregators(list(), map()) -> map().
+fetch_aggregators([], Acc) -> Acc;
+fetch_aggregators([#{metric_src := Mod, key := MetricKey} = Metric | Metrics], Acc) ->
+    NewAcc = Acc#{{Mod, MetricKey} => fetch_aggregators(Metric)},
+    fetch_aggregators(Metrics, NewAcc).
+
+%% Normalize a metric's aggregator config to {Mod, Fun, Opts} triples,
+%% defaulting to dperl_status_agr:write_to_all/5 when none are configured.
+-spec fetch_aggregators(map()) -> list().
+fetch_aggregators(Metric) ->
+    case maps:get(aggregators, Metric, none) of
+        none -> [{dperl_status_agr,write_to_all, #{}}];
+        Aggrs ->
+            lists:foldr(
+              fun({AMod, AFun}, AgrAcc) -> [{AMod, AFun, #{}} | AgrAcc];
+                 ({AMod, AFun, Opts}, AgrAcc) -> [{AMod, AFun, Opts} | AgrAcc]
+              end, [], Aggrs)
+    end.
+
+%% Inject the job's channel into `agr` metric keys configured with an empty
+%% channel; all other metrics pass through unchanged (order preserved).
+-spec add_channel_to_agr_metrics(list(), binary(), list()) -> list().
+add_channel_to_agr_metrics([], _Channel, Acc) -> lists:reverse(Acc);
+add_channel_to_agr_metrics([#{key := {agr, Agr, []}} = Metric | Metrics], Channel, Acc) ->
+    add_channel_to_agr_metrics(Metrics, Channel, [Metric#{key => {agr, Agr, Channel}} | Acc]);
+add_channel_to_agr_metrics([Metric | Metrics], Channel, Acc) ->
+    add_channel_to_agr_metrics(Metrics, Channel, [Metric | Acc]).
+
+%% NOTE: spec corrected — fetch_heartbeat/1 (which this forwards to) can also
+%% return the raw binary cvalue, per its own spec.
+-spec fetch_heartbeat(atom(), list()) -> not_found | map() | binary().
+%% Read the heartbeat row for one [Cid, Node, Platform, "system_info"] group.
+fetch_heartbeat(Table, Key) ->
+    fetch_heartbeat(imem_dal_skvh:read_deep(system, atom_to_binary(Table,utf8), [Key])).
+
+%% Scan the deep-read result for the ".../heartbeat" row and return its cvalue.
+-spec fetch_heartbeat(list()) -> not_found | map() | binary().
+fetch_heartbeat([]) -> not_found;
+fetch_heartbeat([#{ckey := [_, _, _, "system_info", _TNode, "system_info", "heartbeat"],
+                   cvalue := Value} | _]) -> Value;
+fetch_heartbeat([_ | Rest]) -> fetch_heartbeat(Rest).
+
+%% Drop pending requests older than the wait threshold, logging each expiry.
+%% NOTE: spec corrected — Name is the job name string (#context.name, set via
+%% binary_to_list in init/1) and maps:fold/3 over a map returns a map, not a
+%% {list(), map()} tuple.
+-spec filter_not_responded_metrics(list(), map()) -> map().
+filter_not_responded_metrics(Name, Metrics) ->
+    Now = imem_meta:time(),
+    DiffLimit = ?METRIC_WAIT_THRESHOLD(Name),
+    maps:fold(
+      fun(M, Time, WaitingMetrics) ->
+              case imem_datatype:sec_diff(Time, Now) of
+                  Diff when Diff < DiffLimit -> WaitingMetrics;
+                  _ ->
+                      ?Error("Metric : ~p has not got any response till ~p seconds", [M, DiffLimit]),
+                      maps:remove(M, WaitingMetrics)
+              end
+      end, Metrics, Metrics).
diff --git a/src/dperl_status_push.erl b/src/dperl_status_push.erl
new file mode 100644
index 00000000..7f6ca8de
--- /dev/null
+++ b/src/dperl_status_push.erl
@@ -0,0 +1,151 @@
+-module(dperl_status_push).
+
+-include("dperl.hrl").
+
+-behavior(dperl_worker).
+-behavior(dperl_strategy_scr).
+
+% dperl_worker exports
+-export([init/1, terminate/2, handle_call/3, handle_cast/2, handle_info/2,
+         code_change/3, format_status/2, get_status/1, init_state/1]).
+
+-record(state, {cred, links, imem_sess, name, channel, first_sync = true,
+                audit_start_time = {0,0}, chunk_size = 200, provs = [],
+                active_link = 1, mod, func, imem_connected = false}).
+
+% dperl_strategy_scr export
+-export([connect_check_src/1, get_source_events/2, connect_check_dst/1,
+         do_cleanup/2, do_refresh/2, fetch_src/2, fetch_dst/2, delete_dst/2,
+         insert_dst/3, update_dst/3, report_status/3, is_equal/4]).
+
+%% Roll the channel's audit trail forward, emitting changed keys as source
+%% events; previously queued provs are flushed first. first_sync marks the
+%% initial catch-up so its completion can be logged once.
+get_source_events(#state{channel = Channel,
+                         audit_start_time = LastStartTime} = State, BulkSize) ->
+    case length(State#state.provs) > 0 of
+        true -> {ok, State#state.provs, State#state{provs = []}};
+        _ ->
+            case dperl_dal:read_audit(Channel, LastStartTime, BulkSize) of
+                {LastStartTime, LastStartTime, []} ->
+                    if State#state.first_sync == true ->
+                           ?JInfo("Audit rollup is complete"),
+                           {ok, sync_complete, State#state{first_sync = false}};
+                       true -> {ok, sync_complete, State}
+                    end;
+                {_StartTime, NextStartTime, []} ->
+                    {ok, [], State#state{audit_start_time = NextStartTime}};
+                {_StartTime, NextStartTime, Statuses} ->
+                    %% only keys whose new value is defined are pushed
+                    {ok, [K || #{key := K, nval := NV} <- Statuses, NV /= undefined],
+                     State#state{audit_start_time = NextStartTime}}
+            end
+    end.
+
+connect_check_src(State) -> {ok, State}.
+
+connect_check_dst(State) -> ?CONNECT_CHECK_IMEM_LINK(State).
+
+%% Pure sync job: cleanup/refresh must never be scheduled.
+do_cleanup(_, _) -> error(sync_only_job).
+
+do_refresh(_, _) -> error(sync_only_job).
+
+fetch_src(Key, #state{channel = Channel}) ->
+    dperl_dal:job_error_close(Key),
+    dperl_dal:read_channel(Channel, Key).
+
+fetch_dst(_Key, _State) -> ?NOT_FOUND.
+
+delete_dst(_Key, State) -> {false, State}.
+
+%% Push one status row to the remote via Mod:Fun. On rpc failure the source
+%% link is re-checked and the event is retried ({true, _}); on success the
+%% local row is dropped unless it still carries an error.
+insert_dst([JMod, Chn, SId, Job] = Key, Val, #state{mod = Mod, func = Fun, imem_sess = Sess,
+                                                    channel = Channel} = State) ->
+    Count = dperl_dal:count_sibling_jobs(list_to_existing_atom(JMod), Chn),
+    case Val of
+        #{auditTime := '$do_not_log'} -> {false, State};
+        #{auditTime := Time} ->
+            FormattedTime = case Time of
+                                undefined ->
+                                    undefined;
+                                Time when is_list(Time) ->
+                                    list_to_tuple(Time)
+                            end,
+            Status = #{channel => Chn, shortid => SId, job => Job,
+                       at => FormattedTime},
+            NewStatus = case Val of
+                            #{error := Error} -> Status#{error => Error};
+                            _ -> Status
+                        end,
+            case catch Sess:run_cmd(dal_exec, [Mod, Fun, [Count, [NewStatus]]]) of
+                {'EXIT', Err} ->
+                    self() ! connect_src_link,
+                    %% FIX: error tag was garbled to "process_staupdate_dsttus"
+                    %% ("update_dst" spliced into "process_status"); the
+                    %% sibling branch below uses "process_status".
+                    dperl_dal:job_error(Key, <<"sync">>, <<"process_status">>, Err),
+                    ?JError("Status update error ~p", [Err]),
+                    {true, State};
+                ok ->
+                    case maps:is_key(error, Val) of
+                        false -> dperl_dal:remove_from_channel(Channel, Key);
+                        true -> no_op
+                    end,
+                    {false, State};
+                Other ->
+                    dperl_dal:job_error(Key, <<"sync">>, <<"process_status">>, Other),
+                    ?JWarn("Status update bad return ~p", [Other]),
+                    {true, State}
+            end
+    end.
+
+update_dst(_Key, _Val, State) -> {false, State}.
+
+report_status(_Key, _, _State) -> no_op.
+
+%% A missing source row counts as equal so it is not re-pushed.
+is_equal(_Key, ?NOT_FOUND, _, _State) -> true;
+is_equal(_Key, _Src, _, _State) -> false.
+
+get_status(#state{audit_start_time = LastAuditTime}) ->
+    #{lastAuditTime => LastAuditTime}.
+
+%% Resume from the last persisted audit position, if any node recorded one.
+init_state([]) -> #state{};
+init_state([#dperlNodeJobDyn{state = #{lastAuditTime := LastAuditTime}} | _]) ->
+    #state{audit_start_time = LastAuditTime};
+init_state([_ | Others]) ->
+    init_state(Others).
+
+init({#dperlJob{name=Name, args = _Args, srcArgs = #{channel := Channel},
+                dstArgs = #{credential := Credential, links := Links,
+                            mod := Mod, func := Fun}}, State}) ->
+    dperl_dal:create_check_channel(Channel, [audit, {type,map}]),
+    ?JInfo("Starting at audit ~s", [dperl_dal:ts_str(State#state.audit_start_time)]),
+    {ok, State#state{name=Name, cred = Credential, links = Links,
+                     channel = Channel, mod = Mod, func = Fun}};
+init({Args, _}) ->
+    ?JError("bad start parameters ~p", [Args]),
+    {stop, badarg}.
+
+handle_call(Request, _From, State) ->
+    ?JWarn("handle_call ~p", [Request]),
+    {reply, ok, State}.
+
+handle_cast(Request, State) ->
+    ?JWarn("handle_cast ~p", [Request]),
+    {noreply, State}.
+
+handle_info(Request, State) ->
+    ?JWarn("handle_info ~p", [Request]),
+    {noreply, State}.
+
+%% Close the remote imem session on shutdown; a close failure is logged and
+%% recorded as a job error but never crashes termination.
+terminate(Reason, #state{imem_sess = undefined}) ->
+    ?JInfo("terminate ~p", [Reason]);
+terminate(Reason, #state{imem_sess = Session}) ->
+    ?JInfo("terminate ~p", [Reason]),
+    try
+        Session:close()
+    catch
+        _:Error ->
+            dperl_dal:job_error(<<"terminate">>, <<"terminate">>, Error),
+            ?JError("terminate ~p:~p", [Reason, Error])
+    end.
+
+code_change(OldVsn, State, Extra) ->
+    ?JInfo("code_change ~p: ~p", [OldVsn, Extra]),
+    {ok, State}.
+
+format_status(Opt, [PDict, State]) ->
+    ?JInfo("format_status ~p: ~p", [Opt, PDict]),
+    State.
diff --git a/src/dperl_strategy_scr.erl b/src/dperl_strategy_scr.erl
new file mode 100644
index 00000000..99887509
--- /dev/null
+++ b/src/dperl_strategy_scr.erl
@@ -0,0 +1,875 @@
+%% Sync/Cleanup/Refresh (scr) job strategy: drives a worker module through
+%% the sync -> cleanup -> refresh cycle via the callbacks declared below.
+-module(dperl_strategy_scr).
+
+-include("dperl.hrl").
+
+-export([execute/4]).
+
+-ifdef(TEST).
+-export([load_src_after_key/3, load_dst_after_key/3]).
+-endif.
+
+% opaque per-worker state threaded through every callback
+-type state() :: any().
+
+-callback connect_check_src(state()) ->
+    {ok, state()} | {error, any()} | {error, any(), state()}.
+-callback get_source_events(state(), integer()) ->
+    {error, any()} | {ok, list(), state()} | {ok, sync_complete, state()}.
+-callback connect_check_dst(state()) ->
+    {ok, state()} | {error, any()} | {error, any(), state()}.
+-callback fetch_src(any(), state()) -> ?NOT_FOUND | term().
+-callback fetch_dst(any(), state()) -> ?NOT_FOUND | term().
+-callback delete_dst(any(), state()) -> {true, state()} | {false, state()}.
+-callback insert_dst(any(), any(), state()) -> {true, state()} | {false, state()}.
+-callback update_dst(any(), any(), state()) -> {true, state()} | {false, state()}.
+-callback report_status(any(), any(), state()) -> ok | {error, any()}.
+-callback do_refresh(state(), integer()) ->
+    {error, any()} | {ok, state()} | {ok, finish, state()}.
+
+% optional callbacks
+-callback should_cleanup(ddTimestamp()|undefined,
+                         ddTimestamp()|undefined,
+                         integer(), integer(), state()) -> true | false.
+-callback should_refresh(ddTimestamp()|undefined,
+                         ddTimestamp()|undefined,
+                         integer(), integer(), [integer()], state()) -> true | false.
+-callback is_equal(any(), any(), any(), state()) -> true | false.
+-callback update_channel(any(), boolean(), any(), any(), state()) -> {true, state()} | {false, state()}.
+-callback finalize_src_events(state()) -> {true, state()} | {false, state()}.
+-callback should_sync_log(state()) -> true | false.
+
+-optional_callbacks([should_cleanup/5, should_refresh/6, is_equal/4,
+                     update_channel/5, finalize_src_events/1,
+                     should_sync_log/1]).
+
+%% Cleanup comes in three flavours: simple bulk (arity 2) and chunked with
+%% precomputed delete/insert (+ difference) key lists (arities 4 and 5).
+-callback do_cleanup(state(), integer()) ->
+    {error, any()} | {ok, state()} | {ok, finish, state()}.
+-callback do_cleanup(list(), list(), boolean(), state()) ->
+    {error, any()} | {ok, state()} | {ok, finish, state()}.
+-callback do_cleanup(list(), list(), list(), boolean(), state()) ->
+    {error, any()} | {ok, state()} | {ok, finish, state()}.
+-optional_callbacks([do_cleanup/2, do_cleanup/4,do_cleanup/5]).
+
+% chunked cleanup context
+%% Accumulates one chunk of the src/dst key-space comparison: the key windows
+%% read from each side, the traversal bounds, and the resulting key lists.
+-record(cleanup_ctx,
+        {srcKeys                 :: list(),     % keys read from source
+         srcCount                :: integer(),  % number of source keys
+         dstKeys                 :: list(),     % keys read from destination
+         dstCount                :: integer(),  % number of destination keys
+         bulkCount               :: integer(),  % chunk size per pass
+         minKey                  :: any(),      % lower traversal bound
+         maxKey                  :: any(),      % upper traversal bound
+         lastKey                 :: any(),      % cursor of the current pass
+         deletes = []            :: list(),     % keys missing in source
+         differences = []        :: list(),     % keys differing src vs dst
+         inserts = []            :: list()}).   % keys missing in destination
+
+%% ?DL logs with job/module/phase tags; ?S/?C/?R are per-phase shorthands.
+%% The macros reference Mod and Job variables bound at each call site.
+-define(DL(__S,__F,__A),
+        ?Debug([{state,__S},{mod, Mod},{job, Job}],__F,__A)).
+-define(DL(__S,__F), ?DL(__S,__F,[])).
+
+-define(S(__F), ?DL(sync,__F)).
+-define(S(__F,__A), ?DL(sync,__F,__A)).
+-define(C(__F), ?DL(cleanup,__F)).
+-define(C(__F,__A), ?DL(cleanup,__F,__A)).
+-define(R(__F), ?DL(refresh,__F)).
+-define(R(__F,__A), ?DL(refresh,__F,__A)).
+
+%% Schedule a delayed re-entry of the behavior cycle with the given args.
+-define(RESTART_AFTER(__Timeout, __Args),
+        erlang:send_after(__Timeout, self(),
+                          {internal, {behavior, ?MODULE, __Args}})).
+
+-spec execute(atom(), string(), state(), map()) -> state().
+%% Entry point: run one full cycle starting at the sync phase, converting any
+%% crash into job-error bookkeeping plus a delayed restart. A thrown
+%% {step_failed, Map} carries replacement args; {step_failed, State} carries
+%% the worker state at the point of failure. The second clause fills in
+%% default phase flags before re-entering.
+%% NOTE(review): erlang:get_stacktrace/0 is deprecated since OTP 21 and
+%% removed in OTP 24 — migrate to `Class:Error:Stacktrace` catch syntax once
+%% the minimum supported OTP allows.
+%% NOTE(review): job_error(get(jstate), atom_to_binary(Class, utf8), Class)
+%% passes the class twice; possibly the second argument should be the caught
+%% term — verify against dperl_dal:job_error/3.
+execute(Mod, Job, State, #{sync := _, cleanup := _, refresh := _}
+        = Args)
+  when is_map(Args) ->
+    try execute(sync, Mod, Job, State, Args) catch
+        Class:{step_failed, NewArgs} when is_map(NewArgs) ->
+            ?JError("~p ~p step_failed~n~p", [Mod, Class, erlang:get_stacktrace()]),
+            dperl_dal:update_job_dyn(Job, error),
+            ?RESTART_AFTER(?CYCLE_ERROR_WAIT(Mod, Job), NewArgs),
+            dperl_dal:job_error(get(jstate), atom_to_binary(Class, utf8), Class),
+            State;
+        Class:{step_failed, NewState} ->
+            ?JError("~p ~p step_failed~n~p", [Mod, Class, erlang:get_stacktrace()]),
+            dperl_dal:update_job_dyn(Job, Mod:get_status(NewState), error),
+            ?RESTART_AFTER(?CYCLE_ERROR_WAIT(Mod, Job), Args),
+            dperl_dal:job_error(get(jstate), <<"step failed">>, Class),
+            NewState;
+        Class:Error ->
+            ?JError("~p ~p ~p~n~p", [Mod, Class, Error, erlang:get_stacktrace()]),
+            dperl_dal:update_job_dyn(Job, error),
+            ?RESTART_AFTER(?CYCLE_ERROR_WAIT(Mod, Job), Args),
+            dperl_dal:job_error(get(jstate), atom_to_binary(Class, utf8), Class),
+            State
+    end;
+execute(Mod, Job, State, Args) when is_map(Args) ->
+    execute(Mod, Job, State,
+            maps:merge(#{sync => true, cleanup => true, refresh => true},
+                       Args)).
+
+-spec execute(sync|cleanup|refresh, atom(), string(), state(), map()) ->
+    state() | no_return().
% [sync]
%% execute/5 implements the cycle state machine:
%%   sync -> cleanup -> refresh -> idle/finish
%% Each phase either raises {step_failed, _} (caught by execute/4),
%% tail-calls into the next phase, or terminates the cycle by
%% scheduling the next run (idle/finish clauses).
execute(sync, Mod, Job, State, #{sync := Sync} = Args) ->
    put(jstate, s),
    ?S("Connect/check source if not already connected (trivial for push)"),
    State1 =
        case Mod:connect_check_src(State) of
            {error, Error, S1} ->
                ?JError("sync(~p) failed at connect_check_src : ~p", [Mod, Error]),
                dperl_dal:job_error(<<"sync">>, <<"connect_check_src">>, Error),
                error({step_failed, S1});
            {error, Error} ->
                ?JError("sync(~p) failed at connect_check_src : ~p", [Mod, Error]),
                dperl_dal:job_error(<<"sync">>, <<"connect_check_src">>, Error),
                error(step_failed);
            {ok, S1} ->
                dperl_dal:job_error_close(),
                S1
        end,
    if Sync == true ->
           ?S("Get pending list of events (max n) to process from source"),
           case Mod:get_source_events(State1, ?MAX_BULK_COUNT(Mod, Job)) of
               {error, Error1, S2} ->
                   ?JError("sync(~p) failed at get_source_events : ~p", [Mod, Error1]),
                   dperl_dal:job_error(<<"sync">>, <<"get_source_events">>, Error1),
                   error({step_failed, S2});
               {error, Error1} ->
                   ?JError("sync(~p) failed at get_source_events : ~p", [Mod, Error1]),
                   dperl_dal:job_error(<<"sync">>, <<"get_source_events">>, Error1),
                   error({step_failed, State1});
               {ok, sync_complete, S2} ->
                   ?S("If lists of pending events is empty: goto [cleanup]"),
                   dperl_dal:job_error_close(),
                   execute(cleanup, Mod, Job, S2, Args);
               {ok, [], S2} ->
                   ?S("no pending events, re-enter [sync] after cycleAlwaysWait"),
                   execute(finish, Mod, Job, S2, Args);
               {ok, Events, S2} ->
                   ?S("Connect to destination if not already connected (trivial for pull)"),
                   dperl_dal:job_error_close(),
                   State3 =
                       case Mod:connect_check_dst(S2) of
                           {error, Error1, S3} ->
                               ?JError("sync(~p) failed at connect_check_dst : ~p", [Mod, Error1]),
                               dperl_dal:job_error(<<"sync">>, <<"connect_check_dst">>, Error1),
                               error({step_failed, S3});
                           {error, Error1} ->
                               ?JError("sync(~p) failed at connect_check_dst : ~p", [Mod, Error1]),
                               dperl_dal:job_error(<<"sync">>, <<"connect_check_dst">>, Error1),
                               error(step_failed);
                           {ok, S3} ->
                               dperl_dal:job_error_close(),
                               S3
                       end,
                   ?S("Process the events; goto [finish]"),
                   case process_events(Events, Mod, State3) of
                       {true, State4} ->
                           ?JError("sync(~p) failed at process_events", [Mod]),
                           dperl_dal:job_error(<<"sync">>, <<"process_src_events">>, <<"error">>),
                           error({step_failed, State4});
                       {false, State4} ->
                           dperl_dal:update_job_dyn(Job, Mod:get_status(State4), synced),
                           dperl_dal:job_error_close(),
                           execute(finish, Mod, Job, State4, Args);
                       %% idle used for dperl_mec_ic to have idle timeout on
                       %% Try later error from oracle
                       %% would be removed in the future when new
                       %% behavior is used for mec_ic
                       {idle, State4} ->
                           execute(idle, Mod, Job, State4, Args)
                   end
           end;
       true ->
           ?S("disabled! trying cleanup"),
           execute(cleanup, Mod, Job, State1, Args)
    end;

% [cleanup]
execute(cleanup, Mod, Job, State, #{cleanup := true} = Args) ->
    put(jstate, c),
    #{lastAttempt := LastAttempt,
      lastSuccess := LastSuccess} = CleanupState = get_state(cleanup, Job),
    %% jobs may override the scheduling policy with should_cleanup/5
    ShouldCleanupFun =
        case erlang:function_exported(Mod, should_cleanup, 5) of
            true -> fun(LA, LS, BI, CI) -> Mod:should_cleanup(LA, LS, BI, CI, State) end;
            false -> fun(LA, LS, BI, CI) -> should_cleanup(LA, LS, BI, CI) end
        end,
    case apply(ShouldCleanupFun,
               [LastAttempt, LastSuccess, ?CLEANUP_BATCH_INTERVAL(Mod, Job),
                ?CLEANUP_INTERVAL(Mod, Job)]) of
        false ->
            ?C("(sync phase was nop) if last cleanup + cleanupInterval < now goto [refresh]"),
            execute(refresh, Mod, Job, State, Args);
        true ->
            set_state(cleanup, Job, start),
            %% LastAttempt =< LastSuccess means the previous cycle
            %% completed: this is a fresh cycle, otherwise we resume.
            %% Either way the per-cycle counter in Args is bumped.
            Args1 =
                if LastAttempt =< LastSuccess ->
                       ?JInfo("Starting cleanup cycle"),
                       case Args of
                           #{stats := #{cleanup_count := CC} = Stats} ->
                               Args#{stats => Stats#{cleanup_count => CC + 1}};
                           _ ->
                               Stats = maps:get(stats, Args, #{}),
                               Args#{stats => Stats#{cleanup_count => 1}}
                       end;
                   true ->
                       case Args of
                           #{stats := #{cleanup_count := CC} = Stats} ->
                               Args#{stats => Stats#{cleanup_count => CC + 1}};
                           _ ->
                               ?JInfo("Resuming cleanup cycle"),
                               Stats = maps:get(stats, Args, #{}),
                               Args#{stats => Stats#{cleanup_count => 1}}
                       end
                end,
            ?C("Connect to destination if not already connected (trivial for pull)"),
            State1 =
                case Mod:connect_check_dst(State) of
                    {error, Error, S1} ->
                        ?JError("cleanup(~p) failed at connect_check_dst : ~p", [Mod, Error]),
                        dperl_dal:job_error(<<"cleanup">>, <<"connect_check_dst">>, Error),
                        error({step_failed, S1});
                    {error, Error} ->
                        ?JError("cleanup(~p) failed at connect_check_dst : ~p", [Mod, Error]),
                        dperl_dal:job_error(<<"cleanup">>, <<"connect_check_dst">>, Error),
                        error(step_failed);
                    {ok, S1} ->
                        dperl_dal:job_error_close(),
                        S1
                end,
            ?C("Read and compare list of active keys between source and destination"),
            ?C("Build a list of provisioning actions to be taken (aggregated audit list)"),
            ?C("If list provisioning action is non empty: perform the actions; goto [finish]"),
            CleanupBulkCount = ?MAX_CLEANUP_BULK_COUNT(Mod, Job),
            %% chunked cleanup only when the module can stream keys after
            %% a given key from both sides; otherwise do_cleanup/2 is used
            DoCleanupArgs =
                case (erlang:function_exported(Mod, load_src_after_key, 3) andalso
                      erlang:function_exported(Mod, load_dst_after_key, 3)) of
                    false -> [State1, CleanupBulkCount];
                    true ->
                        #{minKey := MinKey, maxKey := MaxKey,
                          lastKey := LastKey} = CleanupState,
                        #{deletes := Deletes, inserts := Inserts,
                          differences := Diffs, lastKey := NextLastKey} =
                            cleanup_refresh_collect(
                              Mod,
                              #cleanup_ctx{minKey = MinKey, maxKey = MaxKey,
                                           lastKey = LastKey, bulkCount = CleanupBulkCount},
                              State1),
                        % update last key
                        case dperl_dal:select(
                               ?JOBDYN_TABLE,
                               [{#dperlNodeJobDyn{name=Job,_='_'},[],['$_']}]) of
                            {[#dperlNodeJobDyn{state = #{cleanup := OldCleanupState}
                                               = NodeJobDynState}], true}
                              when is_map(OldCleanupState) ->
                                dperl_dal:update_job_dyn(
                                  Job,
                                  NodeJobDynState#{
                                    cleanup =>
                                        (case Args1 of
                                             #{stats := #{cleanup_count := CC2}} ->
                                                 OldCleanupState#{count => CC2};
                                             _ -> OldCleanupState
                                         end)#{lastKey => NextLastKey}});
                            _ ->
                                ok
                        end,
                        cleanup_log("Orphan", Deletes),
                        cleanup_log("Missing", Inserts),
                        cleanup_log("Difference", Diffs),
                        %% NextLastKey == MinKey signals the end of a full
                        %% key-space pass (cycle complete flag)
                        case erlang:function_exported(Mod, do_cleanup, 5) of
                            true -> [Deletes, Inserts, Diffs, NextLastKey == MinKey, State1];
                            false -> [Deletes, Inserts, NextLastKey == MinKey, State1]
                        end
                end,
            case apply(Mod, do_cleanup, DoCleanupArgs) of
                {error, Error1} ->
                    ?JError("cleanup(~p) failed at do_cleanup : ~p", [Mod, Error1]),
                    dperl_dal:job_error(<<"cleanup">>, <<"do_cleanup">>, Error1),
                    error({step_failed, Args1});
                {error, Error1, S2} ->
                    ?JError("cleanup(~p) failed at do_cleanup : ~p", [Mod, Error1]),
                    dperl_dal:job_error(<<"cleanup">>, <<"do_cleanup">>, Error1),
                    error({step_failed, S2});
                {ok, S2} ->
                    dperl_dal:job_error_close(),
                    if length(DoCleanupArgs) == 2 ->
                           set_state(
                             cleanup, Job, start,
                             case Args1 of
                                 #{stats := #{cleanup_count := CC0}} ->
                                     (Mod:get_status(S2))#{count => CC0};
                                 _ -> Mod:get_status(S2)
                             end);
                       true -> no_op
                    end,
                    execute(finish, Mod, Job, S2, Args1);
                {ok, finish, S2} ->
                    set_state(
                      cleanup, Job, stop,
                      case Args1 of
                          #{stats := #{cleanup_count := CC1}} ->
                              (Mod:get_status(S2))#{count => CC1};
                          _ -> Mod:get_status(S2)
                      end),
                    dperl_dal:job_error_close(),
                    ?JInfo("Cleanup cycle is complete"),
                    execute(finish, Mod, Job, S2, Args1)
            end
    end;
execute(cleanup, Mod, Job, State, Args) ->
    put(jstate, c),
    ?C("disabled! trying refresh"),
    execute(refresh, Mod, Job, State, Args);

% [refresh]
execute(refresh, Mod, Job, State, #{refresh := true} = Args) ->
    put(jstate, r),
    #{lastAttempt := LastAttempt,
      lastSuccess := LastSuccess} = get_state(refresh, Job),
    %% jobs may override the scheduling policy with should_refresh/6
    ShouldRefreshFun =
        case erlang:function_exported(Mod, should_refresh, 6) of
            true -> fun(LA, LS, BI, RI, RH) -> Mod:should_refresh(LA, LS, BI, RI, RH, State) end;
            false -> fun(LA, LS, BI, RI, RH) -> should_refresh(LA, LS, BI, RI, RH) end
        end,
    case apply(ShouldRefreshFun,
               [LastAttempt, LastSuccess, ?REFRESH_BATCH_INTERVAL(Mod, Job),
                ?REFRESH_INTERVAL(Mod, Job), ?REFRESH_HOURS(Mod, Job)]) of
        false ->
            ?R("If last refresh + refreshInterval < now(): goto [idle]"),
            ?R("If current hour is not in refreshHours): goto [idle]"),
            execute(idle, Mod, Job, State, Args);
        true ->
            set_state(refresh, Job, start),
            %% same start/resume counting scheme as in [cleanup]
            Args1 =
                if LastAttempt =< LastSuccess ->
                       ?JInfo("Starting refresh cycle"),
                       case Args of
                           #{stats := #{refresh_count := RC} = Stats} ->
                               Args#{stats => Stats#{refresh_count => RC + 1}};
                           _ ->
                               Stats = maps:get(stats, Args, #{}),
                               Args#{stats => Stats#{refresh_count => 1}}
                       end;
                   true ->
                       case Args of
                           #{stats := #{refresh_count := RC} = Stats} ->
                               Args#{stats => Stats#{refresh_count => RC + 1}};
                           _ ->
                               ?JInfo("Resuming refresh cycle"),
                               Stats = maps:get(stats, Args, #{}),
                               Args#{stats => Stats#{refresh_count => 1}}
                       end
                end,
            ?R("Connect to destination if not already connected (trivial for pull)"),
            State1 =
                case Mod:connect_check_dst(State) of
                    {error, Error, S1} ->
                        ?JError("refresh(~p) failed at connect_check_dst : ~p", [Mod, Error]),
                        dperl_dal:job_error(<<"refresh">>, <<"connect_check_dst">>, Error),
                        error({step_failed, S1});
                    {error, Error} ->
                        ?JError("refresh(~p) failed at connect_check_dst : ~p", [Mod, Error]),
                        dperl_dal:job_error(<<"refresh">>, <<"connect_check_dst">>, Error),
                        error(step_failed);
                    {ok, S1} ->
                        dperl_dal:job_error_close(),
                        S1
                end,
            ?R("Read and compare values between source and existing destination keys"),
            ?R("Build a list provisioning actions to be taken"),
            ?R("If list of provisioning actions is empty: goto [finish]"),
            ?R("Perform the actions: goto [finish]"),
            case Mod:do_refresh(State1, ?MAX_REFRESH_BULK_COUNT(Mod, Job)) of
                {error, Error1} ->
                    ?JError("refresh(~p) failed at do_refresh : ~p", [Mod, Error1]),
                    dperl_dal:job_error(<<"refresh">>, <<"do_refresh">>, Error1),
                    error({step_failed, Args1});
                {ok, S2} ->
                    set_state(
                      refresh, Job, start,
                      case Args1 of
                          #{stats := #{refresh_count := RC0}} ->
                              (Mod:get_status(S2))#{count => RC0};
                          _ -> Mod:get_status(S2)
                      end),
                    dperl_dal:job_error_close(),
                    execute(finish, Mod, Job, S2, Args1);
                {ok, finish, S2} ->
                    set_state(
                      refresh, Job, stop,
                      case Args1 of
                          #{stats := #{refresh_count := RC1}} ->
                              (Mod:get_status(S2))#{count => RC1};
                          _ -> Mod:get_status(S2)
                      end),
                    dperl_dal:job_error_close(),
                    ?JInfo("Refresh cycle is complete"),
                    execute(finish, Mod, Job, S2, Args1)

            end
    end;
execute(refresh, Mod, Job, State, Args) ->
    put(jstate, r),
    ?R("disabled! going idle"),
    execute(idle, Mod, Job, State, Args);
%% [idle] nothing to do: record status and re-run after the idle wait
execute(idle, Mod, Job, State, Args) ->
    put(jstate, i),
    ?RESTART_AFTER(?CYCLE_IDLE_WAIT(Mod, Job), Args),
    dperl_dal:update_job_dyn(Job, Mod:get_status(State), idle),
    State;
%% [finish] cycle done: re-run after the always wait
execute(finish, Mod, Job, State, Args) ->
    put(jstate, f),
    ?RESTART_AFTER(?CYCLE_ALWAYS_WAIT(Mod, Job), Args),
    State.

%% Spec corrected: get_state/2 returns a map (see maps:merge/2 in its
%% body), not a timestamp tuple as previously declared.
-spec get_state(cleanup|refresh, binary()) -> map().
+get_state(Type, Job) when (Type == cleanup orelse Type == refresh)
                          andalso is_binary(Job) ->
    %% Read the persisted per-phase state from dperlNodeJobDyn.
    %% For cleanup, key-window defaults are merged in so chunked cleanup
    %% always finds minKey/maxKey/lastKey; a job without recorded state
    %% starts with epoch timestamps (i.e. "never attempted/succeeded").
    maps:merge(
      if Type == cleanup ->
             #{minKey => -1, maxKey => <<255>>, lastKey => 0};
         true -> #{}
      end,
      case dperl_dal:select(
             ?JOBDYN_TABLE,
             [{#dperlNodeJobDyn{name=Job,state='$1',_='_'},[],['$1']}]) of
          {[#{Type:=State}], true} when is_map(State) -> State;
          {_, true} -> #{lastAttempt => ?EPOCH, lastSuccess => ?EPOCH}
      end).

-spec set_state(cleanup|refresh, binary(), start | stop) -> any().
set_state(Type, Job, Status) -> set_state(Type, Job, Status, #{}).

-spec set_state(cleanup|refresh, binary(), start | stop, map()) -> any().
%% Persist phase progress: 'start' bumps lastAttempt, 'stop' bumps
%% lastSuccess. An optional count in State0 is carried into the stored
%% phase state, and the node job status is set to
%% cleaning/cleaned/refreshing/refreshed accordingly.
set_state(Type, Job, Status, State0)
  when (Type == cleanup orelse Type == refresh) andalso
       (Status == start orelse Status == stop) andalso is_binary(Job) ->
    {NodeJobDyn, NewStatus0} =
        case dperl_dal:select(
               ?JOBDYN_TABLE,
               [{#dperlNodeJobDyn{name=Job,_='_'},[],['$_']}]) of
            {[#dperlNodeJobDyn{state=#{Type:=OldState}} = NJD], true}
              when is_map(OldState) ->
                {NJD, OldState};
            {[#dperlNodeJobDyn{} = NJD], true} ->
                {NJD, #{lastAttempt => os:timestamp(),
                        lastSuccess => ?EPOCH}}
        end,
    %% split a 'count' entry (if any) out of State0 into the phase state
    {NewStatus, State} =
        case maps:get(count, State0, '$not_found') of
            '$not_found' -> {NewStatus0, State0};
            Count ->
                {NewStatus0#{count => Count}, maps:remove(count, State0)}
        end,
    TypeState = case {Type, Status} of
                    {cleanup, start} -> cleaning;
                    {cleanup, stop} -> cleaned;
                    {refresh, start} -> refreshing;
                    {refresh, stop} -> refreshed
                end,
    % create 'Type' state if doesn't exists
    % if exists update 'LastSuccess' timestamp to current time
    NodeJobDynState = NodeJobDyn#dperlNodeJobDyn.state,
    dperl_dal:update_job_dyn(
      Job,
      maps:merge(
        NodeJobDynState#{
          Type =>
              if Status == start ->
                     NewStatus#{lastAttempt => imem_meta:time()};
                 true ->
                     NewStatus#{lastSuccess => imem_meta:time()}
              end}, State), TypeState).
%
% default callbacks
%

%% Default cleanup scheduling policy: resume a still-running batch
%% after BatchInterval ms since the last attempt, otherwise start a new
%% cleanup once Interval ms have passed since the last success.
should_cleanup(LastAttempt, LastSuccess, BatchInterval, Interval) ->
    if LastAttempt > LastSuccess ->
           imem_datatype:msec_diff(LastAttempt) > BatchInterval;
       true ->
           imem_datatype:msec_diff(LastSuccess) > Interval
    end.

%% Default refresh scheduling policy: like should_cleanup/4, but a new
%% refresh additionally only starts when the current hour is in Hours;
%% an empty Hours list means "any hour".
should_refresh(LastAttempt, LastSuccess, BatchInterval, Interval, Hours) ->
    if LastAttempt > LastSuccess ->
           imem_datatype:msec_diff(LastAttempt) > BatchInterval;
       true ->
           case imem_datatype:msec_diff(LastSuccess) > Interval of
               false -> false;
               true ->
                   %% simplified from the original nested if/case:
                   %% equivalent, and erlang:time() is still only
                   %% evaluated when Hours is non-empty
                   Hours == [] orelse
                       begin
                           {Hour, _, _} = erlang:time(),
                           lists:member(Hour, Hours)
                       end
           end
    end.

%% Default value comparison used to decide whether a destination update
%% is needed: maps compare after normalization, lists compare
%% order-insensitively, anything else must match exactly.
is_equal(_Key, S, S, _State) -> true;
is_equal(_Key, S, D, _State) when is_map(S), is_map(D) ->
    dperl_dal:normalize_map(S) == dperl_dal:normalize_map(D);
is_equal(_Key, S, D, _State) when is_list(S), is_list(D) ->
    lists:sort(S) == lists:sort(D);
is_equal(_Key, _, _, _State) -> false.

%% Warn-log the keys affected by a cleanup action; no-op for an empty
%% list. Integer keys are printed compactly (~w), others prettily (~p).
cleanup_log(_Msg, []) -> no_op;
cleanup_log(Msg, [K | _] = Keys) when is_integer(K) ->
    ?JWarn("~s (~p) ~w", [Msg, length(Keys), Keys]);
cleanup_log(Msg, Keys) ->
    ?JWarn("~s (~p) ~p", [Msg, length(Keys), Keys]).

%% Info-log a single sync action when logging is enabled for the job;
%% a {Key, Value} pair is logged by its key only.
sync_log(_, _, false) -> no_op;
sync_log(Msg, {Key, _}, ShouldLog) -> sync_log(Msg, Key, ShouldLog);
sync_log(Msg, Key, _) when is_binary(Key) -> ?JInfo("~s : ~s", [Msg, Key]);
sync_log(Msg, Key, _) -> ?JInfo("~s : ~p", [Msg, Key]).

%%----------------
%% chunked cleanup
%%

%% Drive the per-event sync loop, honoring the job module's optional
%% should_sync_log/1 to decide whether individual actions are logged.
process_events(Keys, Mod, State) ->
    ShouldLog =
        case erlang:function_exported(Mod, should_sync_log, 1) of
            true -> Mod:should_sync_log(State);
            false -> true
        end,
    process_events(Keys, Mod, State, ShouldLog, false).
%% process_events/5: fetch each key from source and destination, decide
%% the provisioning action (insert/update/delete/protected update) and
%% apply it via execute_prov_fun. IsError turns true as soon as any key
%% fails and is returned with the final state; after the last key the
%% optional finalize_src_events/1 callback is given a chance to commit.
process_events([], Mod, State, _ShouldLog, IsError) ->
    case erlang:function_exported(Mod, finalize_src_events, 1) of
        true -> execute_prov_fun(no_log, Mod, finalize_src_events, [State], false, IsError);
        false -> {IsError, State}
    end;
process_events([Key | Keys], Mod, State, ShouldLog, IsError) ->
    {NewIsError, NewState} =
        case {Mod:fetch_src(Key, State), Mod:fetch_dst(Key, State)} of
            {S, S} ->
                Mod:report_status(Key, no_op, State),
                {IsError, State}; %% nothing to do
            {{protected, _}, ?NOT_FOUND} -> % pusher protection
                ?JError("Protected ~p is not found on target", [Key]),
                Error = <<"Protected key is not found on target">>,
                Mod:report_status(Key, Error, State),
                dperl_dal:job_error(Key, <<"sync">>, <<"process_events">>, Error),
                {true, State};
            {{protected, S}, D} -> % pusher protection
                execute_prov_fun("Protected", Mod, update_channel, [Key, true, S, D, State], ShouldLog, IsError, check);
            {{protected, IsSamePlatform, S}, D} -> % puller protection
                execute_prov_fun("Protected", Mod, update_channel, [Key, IsSamePlatform, S, D, State], ShouldLog, IsError, check);
            {?NOT_FOUND, _D} -> execute_prov_fun("Deleted", Mod, delete_dst, [Key, State], ShouldLog, IsError);
            {S, ?NOT_FOUND} -> execute_prov_fun("Inserted", Mod, insert_dst, [Key, S, State], ShouldLog, IsError);
            {error, _} -> {true, State};
            {_, error} -> {true, State};
            {{error, _} = Error, _} ->
                ?JError("Fetch src ~p : ~p", [Key, Error]),
                Mod:report_status(Key, Error, State),
                {true, State};
            {_, {error, _} = Error} ->
                ?JError("Fetch dst ~p : ~p", [Key, Error]),
                Mod:report_status(Key, Error, State),
                {true, State};
            {{error, Error, State1}, _} ->
                ?JError("Fetch src ~p : ~p", [Key, Error]),
                Mod:report_status(Key, {error, Error}, State1),
                {true, State1};
            {_, {error, Error, State1}} ->
                ?JError("Fetch dst ~p : ~p", [Key, Error]),
                Mod:report_status(Key, {error, Error}, State1),
                {true, State1};
            {S, D} ->
                %% both sides exist but differ structurally: ask the job
                %% module (or the default is_equal/4) whether they match
                DiffFun =
                    case erlang:function_exported(Mod, is_equal, 4) of
                        true -> fun Mod:is_equal/4;
                        false -> fun is_equal/4
                    end,
                case DiffFun(Key, S, D, State) of
                    false -> execute_prov_fun("Updated", Mod, update_dst, [Key, S, State], ShouldLog, IsError);
                    true ->
                        Mod:report_status(Key, no_op, State),
                        {IsError, State}
                end
        end,
    process_events(Keys, Mod, NewState, ShouldLog, NewIsError).

%% Apply provisioning callback Fun on Mod with Args. On {false, S}
%% (success) the action is logged; {true, S} / {idle, S} are passed
%% through; any other result (crash included) is logged and flags the
%% cycle failed, keeping the last argument (the state) as-is.
execute_prov_fun(Op, Mod, Fun, Args, ShouldLog, IsError) ->
    case catch apply(Mod, Fun, Args) of
        {false, NewState} ->
            sync_log(Op, hd(Args), ShouldLog),
            {IsError, NewState};
        {true, NewState} -> {true, NewState};
        {idle, NewState} -> {idle, NewState};
        Error ->
            %% BUGFIX: dispatch on the callback name (Fun), not the log
            %% label (Op). Op is 'no_log' or a string, so the original
            %% 'case Op of finalize_src_events' branch was unreachable
            %% and finalize failures were logged with hd(Args) (the
            %% state) as if it were a key.
            case Fun of
                finalize_src_events ->
                    ?JError("Executing : ~p Error : ~p", [Fun, Error]);
                _ ->
                    ?JError("Executing : ~p for key : ~p Error : ~p",
                            [Fun, hd(Args), Error])
            end,
            {true, lists:last(Args)}
    end.

%% Variant that first verifies Mod actually exports Fun/length(Args).
execute_prov_fun(Op, Mod, Fun, Args, ShouldLog, IsError, check) ->
    case erlang:function_exported(Mod, Fun, length(Args)) of
        true -> execute_prov_fun(Op, Mod, Fun, Args, ShouldLog, IsError);
        false ->
            ?Error("Function : ~p not exported in mod : ~p", [Fun, Mod]),
            {true, lists:last(Args)}
    end.

%% Load the next batches of ordered keys after the last processed key
%% from source and destination and merge-compare them into a set of
%% provisioning actions. Spec completed with the 'differences' key the
%% result map actually carries.
-spec cleanup_refresh_collect(atom(), #cleanup_ctx{}, state()) ->
          #{deletes => list(), differences => list(),
            inserts => list(), lastKey => any()}.
cleanup_refresh_collect(Mod,
                        #cleanup_ctx{minKey = MinKey, maxKey = MaxKey,
                                     lastKey = LastKey, bulkCount = BulkCnt} = CleanupCtx,
                        State) ->
    CurKey = if
                 LastKey =< MinKey -> MinKey; % throw to cycle start if getting
                 LastKey >= MaxKey -> MinKey; % out of key bounds by re-config
                 true -> LastKey
             end,
    SrcKeys = Mod:load_src_after_key(CurKey, BulkCnt, State),
    DstKeys = Mod:load_dst_after_key(CurKey, BulkCnt, State),
    cleanup_refresh_compare(CleanupCtx#cleanup_ctx{
                              srcKeys = SrcKeys, srcCount = length(SrcKeys),
                              dstKeys = DstKeys, dstCount = length(DstKeys),
                              lastKey = CurKey}).
%% Merge-compare of two ascending key batches (bare keys or {Key, Hash}
%% pairs). A returned lastKey equal to minKey signals that the full key
%% space has been covered (cycle complete). Clause order is significant.
-spec cleanup_refresh_compare(#cleanup_ctx{}) -> #{deletes => list(), differences => list(), inserts => list(), lastKey => any()}.
%% dst exhausted and both batches were short: remaining src keys are
%% missing on dst; full pass done (lastKey = minKey)
cleanup_refresh_compare(#cleanup_ctx{
                          srcKeys = SrcKeys, dstKeys = [], deletes = Deletes,
                          inserts = Inserts, minKey = MinKey, differences = Diffs,
                          dstCount = DstCount, bulkCount = BulkCnt, srcCount = SrcCount})
  when DstCount < BulkCnt, SrcCount < BulkCnt ->
    Remaining = fetch_keys(SrcKeys),
    #{deletes => Deletes, differences => Diffs, inserts => Inserts++Remaining, lastKey => MinKey};
%% dst batch was empty from the start: all src keys are inserts,
%% continue next chunk after the last src key
cleanup_refresh_compare(#cleanup_ctx{srcKeys = SrcKeys, dstKeys = [], deletes = Deletes, dstCount = DstCount,
                                     inserts = Inserts, differences = Diffs})
  when DstCount == 0 ->
    Remaining = fetch_keys(SrcKeys),
    #{deletes => Deletes, differences => Diffs, inserts => Inserts++Remaining, lastKey => last_key(SrcKeys)};
%% dst batch consumed mid-compare: stop here, resume at lastKey
cleanup_refresh_compare(#cleanup_ctx{dstKeys = [], deletes = Deletes, differences = Diffs,
                                     inserts = Inserts, lastKey = LK}) ->
    #{deletes => Deletes, differences => Diffs, inserts => Inserts, lastKey => LK};
%% src exhausted and both batches were short: remaining dst keys are
%% orphans; full pass done
cleanup_refresh_compare(#cleanup_ctx{
                          srcCount = SrcCount, dstKeys = DstKeys, bulkCount = BulkCnt,
                          minKey = MinKey, srcKeys = [], deletes = Deletes,
                          inserts = Inserts, dstCount = DstCount, differences = Diffs})
  when SrcCount < BulkCnt, DstCount < BulkCnt ->
    Remaining = fetch_keys(DstKeys),
    #{deletes => Deletes++Remaining, differences => Diffs, inserts => Inserts, lastKey => MinKey};
%% src batch was empty from the start: all dst keys are orphans
cleanup_refresh_compare(#cleanup_ctx{srcKeys = [], deletes = Deletes,
                                     inserts = Inserts, dstKeys = DstKeys,
                                     differences = Diffs, srcCount = SrcCount})
  when SrcCount == 0 ->
    Remaining = fetch_keys(DstKeys),
    #{deletes => Deletes ++ Remaining, differences => Diffs, inserts => Inserts, lastKey => last_key(DstKeys)};
%% src batch consumed mid-compare: stop here, resume at lastKey
cleanup_refresh_compare(#cleanup_ctx{srcKeys = [], deletes = Deletes, differences = Diffs,
                                     inserts = Inserts, lastKey = LK}) ->
    #{deletes => Deletes, differences => Diffs, inserts => Inserts, lastKey => LK};
%% identical heads (same key and, for pairs, same value): advance both
cleanup_refresh_compare(#cleanup_ctx{srcKeys = [K|SrcKeys], dstKeys = [K|DstKeys]}
                        = CleanupCtx) ->
    cleanup_refresh_compare(CleanupCtx#cleanup_ctx{srcKeys = SrcKeys, dstKeys = DstKeys,
                                                   lastKey = last_key([K])});
%% same key, different values: record a difference
cleanup_refresh_compare(#cleanup_ctx{srcKeys = [{K, _} | SrcKeys], dstKeys = [{K, _} | DstKeys],
                                     differences = Diffs} = CleanupCtx) ->
    cleanup_refresh_compare(CleanupCtx#cleanup_ctx{srcKeys = SrcKeys, dstKeys = DstKeys,
                                                   lastKey = K, differences = [K | Diffs]});
%% differing heads: the smaller key is missing on the other side
cleanup_refresh_compare(#cleanup_ctx{srcKeys = [SK|SrcKeys], dstKeys = [DK | DstKeys],
                                     inserts = Inserts, deletes = Deletes} = CleanupCtx) ->
    case {last_key([SK]), last_key([DK])} of
        {K1, K2} when K1 < K2 -> cleanup_refresh_compare(
                                   CleanupCtx#cleanup_ctx{srcKeys = SrcKeys,
                                                          inserts = [K1|Inserts], lastKey = K1});
        {K1, K2} when K2 < K1 -> cleanup_refresh_compare(
                                   CleanupCtx#cleanup_ctx{dstKeys = DstKeys,
                                                          deletes = [K2|Deletes], lastKey = K2})
    end.

%% Project a batch of bare keys or {Key, Value} pairs onto its keys.
fetch_keys([]) -> [];
fetch_keys([{_, _} | _] = KVs) -> [K || {K, _} <- KVs];
fetch_keys(Keys) -> Keys.

%% Key of the last element of a (non-empty) batch.
last_key([{_, _} | _] = KVs) -> element(1, lists:last(KVs));
last_key(Keys) -> lists:last(Keys).

%% ----------------------


%% ----------------------
%% Eunit Tests
%% ----------------------

-ifdef(TEST).

-include_lib("eunit/include/eunit.hrl").

%% Test double for load_src_after_key/load_dst_after_key: at most
%% BulkCnt elements with key greater than CurKey, in ascending order.
load_batch(CurKey, BulkCnt, Keys) ->
    lists:sort(lists:foldl(
                 fun({K, _} = E , Acc) ->
                         if length(Acc) < BulkCnt andalso K > CurKey -> [E | Acc];
                            true -> Acc
                         end;
                    (E, Acc) ->
                         if length(Acc) < BulkCnt andalso E > CurKey -> [E | Acc];
                            true -> Acc
                         end
                 end, [], Keys)).

load_src_after_key(CurKey, BulkCnt, {SrcKeys, _}) ->
    load_batch(CurKey, BulkCnt, SrcKeys).

load_dst_after_key(CurKey, BulkCnt, {_, DstKeys}) ->
    load_batch(CurKey, BulkCnt, DstKeys).
%% Single-chunk compare on random bare-key sets: applying the computed
%% deletes and inserts to dst must reproduce src.
cleanup_refresh_compare_test() ->
    BulkCnt = 1000,
    SrcCount = rand:uniform(1000),
    SrcKeys = lists:usort([rand:uniform(3000) || _ <- lists:seq(1, SrcCount)]),
    DstCount = rand:uniform(1000),
    DstKeys = lists:usort([rand:uniform(3000) || _ <- lists:seq(1, DstCount)]),
    #{deletes := Dels, inserts := Ins} =
        cleanup_refresh_collect(?MODULE,
                                #cleanup_ctx{minKey = -1, maxKey = <<255>>,
                                             lastKey = 0, bulkCount = BulkCnt},
                                {SrcKeys, DstKeys}),
    Cleaned = lists:sort(lists:foldr(fun(K, Acc) ->
                                             case lists:member(K, Dels) of
                                                 true ->
                                                     lists:delete(K, Acc);
                                                 false -> Acc
                                             end
                                     end, DstKeys, Dels) ++ Ins),
    ?assertEqual(Cleaned, SrcKeys).

%% Full multi-chunk pass (bulk 100): verifies deletes/inserts make dst
%% equal to src and that differences match the keys whose values differ.
complete_cleanup_refresh(AllSrcKeys, AllDstKeys) ->
    BulkCnt = 100,
    MaxKey = <<255>>,
    Ctx = #cleanup_ctx{minKey = -1, maxKey = MaxKey, lastKey = 0,
                       bulkCount = BulkCnt},
    #{deletes := Dels, differences := Diffs, inserts := Ins} = cleanup_refresh_loop(Ctx, 0, {AllSrcKeys, AllDstKeys}, #{}),
    Cleaned = lists:usort(lists:foldr(fun(K, Acc) ->
                                              case lists:member(K, Dels) of
                                                  true ->
                                                      lists:delete(K, Acc);
                                                  false -> Acc
                                              end
                                      end, fetch_keys(AllDstKeys), Dels) ++ Ins),
    ?assertEqual(Cleaned, lists:usort(fetch_keys(AllSrcKeys))),
    Diffs1 = lists:usort(lists:foldl(
                           fun({K, V}, Acc) ->
                                   case lists:keyfind(K, 1, AllDstKeys) of
                                       {K, V} -> Acc;
                                       {K, _} -> [K | Acc];
                                       false -> Acc
                                   end;
                              (_, Acc) -> Acc
                           end, [], AllSrcKeys)),
    ?assertEqual(Diffs1, lists:usort(Diffs)).

%% Single collect pass with an explicit bulk count; returns the raw
%% action map for the caller to assert on.
complete_cleanup_refresh(AllSrcKeys, AllDstKeys, BulkCnt) ->
    MaxKey = <<255>>,
    Ctx = #cleanup_ctx{minKey = -1, maxKey = MaxKey, lastKey = 0,
                       bulkCount = BulkCnt},
    cleanup_refresh_collect(?MODULE, Ctx, {AllSrcKeys, AllDstKeys}).
%% Iterate cleanup_refresh_collect chunk by chunk until lastKey wraps
%% back to minKey (-1), accumulating all actions.
cleanup_refresh_loop(_, -1, _, Acc) -> Acc;
cleanup_refresh_loop(Ctx, CurKey, AllKeys, Acc) ->
    #{deletes := Dels, differences := Diffs, inserts := Ins, lastKey := LastKey} =
        cleanup_refresh_collect(?MODULE, Ctx#cleanup_ctx{lastKey = CurKey}, AllKeys),
    NewAcc = Acc#{deletes => Dels ++ maps:get(deletes, Acc, []),
                  differences => Diffs ++ maps:get(differences, Acc, []),
                  inserts => Ins ++ maps:get(inserts, Acc, [])},
    cleanup_refresh_loop(Ctx, LastKey, AllKeys, NewAcc).

%% Cleanup on bare key lists (no values, so no differences possible),
%% including missing key ranges and empty sides.
cleanup_only_test() ->
    [ok] = lists:usort([begin
                            AllSrcKeys = lists:usort([rand:uniform(5000) || _ <- lists:seq(1, 2000)]),
                            AllDstKeys = lists:usort([rand:uniform(5000) || _ <- lists:seq(1, 2000)]),
                            complete_cleanup_refresh(AllSrcKeys, AllDstKeys)
                        end || _ <- lists:seq(1, 10)]),
    AllSrcKeys1 = lists:usort([rand:uniform(5000) || _ <- lists:seq(1, 2000)]),
    AllDstKeys1 = lists:usort([rand:uniform(5000) || _ <- lists:seq(1, 2000)]),
    ok = complete_cleanup_refresh(AllSrcKeys1, AllDstKeys1),
    %cleanup with SrcKeys missing 500 to 1000
    ok = complete_cleanup_refresh([K || K <- AllSrcKeys1, K < 500 orelse K > 1000], AllDstKeys1),
    %cleanup with DstKeys missing 500 to 1000
    ok = complete_cleanup_refresh(AllSrcKeys1, [K || K <- AllDstKeys1, K < 500 orelse K > 1000]),
    %cleanup with DstKeys as []
    ok = complete_cleanup_refresh(AllSrcKeys1, []),
    %cleanup with SrcKeys as []
    ok = complete_cleanup_refresh([], AllDstKeys1).
%% Cleanup + refresh on {Key, Value} lists, i.e. difference detection
%% is exercised as well as inserts/deletes.
cleanup_refresh_test() ->
    %% cleanup with refresh tests
    %refresh test with differences
    [ok] = lists:usort([begin
                            AllSrcKeys = lists:sort(maps:to_list(maps:from_list([{rand:uniform(5000), S} || S <- lists:seq(1, 2000)]))),
                            AllDstKeys = lists:sort(maps:to_list(maps:from_list([{rand:uniform(5000), S} || S <- lists:seq(1, 2000)]))),
                            complete_cleanup_refresh(AllSrcKeys, AllDstKeys)
                        end || _ <- lists:seq(1,10)]),
    [ok] = lists:usort([begin
                            AllSrcKeys = lists:sort(maps:to_list(maps:from_list([{rand:uniform(5000), rand:uniform(5000)} || _ <- lists:seq(1, 2000)]))),
                            AllDstKeys = lists:sort(maps:to_list(maps:from_list([{rand:uniform(5000), rand:uniform(5000)} || _ <- lists:seq(1, 2000)]))),
                            complete_cleanup_refresh(AllSrcKeys, AllDstKeys)
                        end || _ <- lists:seq(1,10)]),
    AllSrcKeys = lists:sort(maps:to_list(maps:from_list([{rand:uniform(5000), rand:uniform(5000)} || _ <- lists:seq(1, 2000)]))),
    AllDstKeys = lists:sort(maps:to_list(maps:from_list([{rand:uniform(5000), rand:uniform(5000)} || _ <- lists:seq(1, 2000)]))),
    ok = complete_cleanup_refresh(AllSrcKeys, AllDstKeys),
    %cleanup refresh with SrcKeys missing 500 to 1000
    ok = complete_cleanup_refresh([{K, V} || {K, V} <- AllSrcKeys, K < 500 orelse K > 1000], AllDstKeys),
    %cleanup refresh with DstKeys missing 500 to 1000
    ok = complete_cleanup_refresh(AllSrcKeys, [{K, V} || {K, V} <- AllDstKeys, K < 500 orelse K > 1000]).
%% Boundary conditions of the chunked compare: one side empty, one side
%% a single pair, one side much shorter than the other.
cleanup_refresh_boundary_test() ->
    RandomKVs =
        fun() ->
                Pairs = [{rand:uniform(5000), rand:uniform(5000)}
                         || _ <- lists:seq(1, 2000)],
                lists:sort(maps:to_list(maps:from_list(Pairs)))
        end,
    SrcKVs = RandomKVs(),
    DstKVs = RandomKVs(),
    %cleanup refresh with DstKeys as []
    ok = complete_cleanup_refresh(SrcKVs, []),
    %cleanup refresh with SrcKeys as []
    ok = complete_cleanup_refresh([], DstKVs),
    %cleanup refresh with DstKeys as [{1, 10}]
    ok = complete_cleanup_refresh(SrcKVs, [{1, 10}]),
    %cleanup refresh with SrcKeys as [{1, 10}]
    ok = complete_cleanup_refresh([{1, 10}], DstKVs),
    %cleanup refresh with less DstKeys
    ok = complete_cleanup_refresh(SrcKVs, lists:sublist(DstKVs, 100)),
    %cleanup refresh with less SrcKeys
    ok = complete_cleanup_refresh(lists:sublist(SrcKVs, 100), DstKVs).

%% Destination-only extra keys must surface purely as deletes.
cleanup_refresh_only_dels_test() ->
    Base = lists:sort(maps:to_list(maps:from_list(
                        [{rand:uniform(5000), rand:uniform(5000)}
                         || _ <- lists:seq(1, 2000)]))),
    Extra = [{6000, 6}, {7000, 7}, {8000, 8}],
    #{inserts := Ins, deletes := Dels, differences := Diffs} =
        complete_cleanup_refresh(Base, Base ++ Extra, 2000),
    ?assertEqual([], Diffs),
    ?assertEqual([], Ins),
    ?assertEqual([6000, 7000, 8000], Dels).

%% Source-only extra keys must surface purely as inserts.
cleanup_refresh_only_ins_test() ->
    Base = lists:sort(maps:to_list(maps:from_list(
                        [{rand:uniform(5000), rand:uniform(5000)}
                         || _ <- lists:seq(1, 2000)]))),
    Extra = [{6000, 6}, {7000, 7}, {8000, 8}],
    #{inserts := Ins, deletes := Dels, differences := Diffs} =
        complete_cleanup_refresh(Base ++ Extra, Base, 2000),
    ?assertEqual([], Diffs),
    ?assertEqual([6000, 7000, 8000], Ins),
    ?assertEqual([], Dels).
%% Identical source and destination must yield no actions at all.
cleanup_refresh_no_op_test() ->
    AllKeys = lists:sort(maps:to_list(maps:from_list([{rand:uniform(5000), rand:uniform(5000)} || _ <- lists:seq(1, 2000)]))),
    #{inserts := Ins, deletes := Dels, differences := Diffs} = complete_cleanup_refresh(AllKeys, AllKeys, 2000),
    ?assertEqual([], Diffs),
    ?assertEqual([], Ins),
    ?assertEqual([], Dels).

%% Constant values on both sides: only inserts/deletes may occur,
%% never differences.
cleanup_refresh_no_diff_test() ->
    AllSrcKeys = lists:sort(maps:to_list(maps:from_list([{rand:uniform(5000), 1} || _ <- lists:seq(1, 2000)]))),
    AllDstKeys = lists:sort(maps:to_list(maps:from_list([{rand:uniform(5000), 1} || _ <- lists:seq(1, 2000)]))),
    #{differences := Diffs} = complete_cleanup_refresh(AllSrcKeys, AllDstKeys, 2000),
    ?assertEqual([], Diffs).

-endif.
diff --git a/src/dperl_sup.erl b/src/dperl_sup.erl
new file mode 100644
index 00000000..e0b95398
--- /dev/null
+++ b/src/dperl_sup.erl
@@ -0,0 +1,93 @@
-module(dperl_sup).
-behaviour(supervisor).

-include("dperl.hrl").

%% API
-export([start_link/0]).

%% Supervisor callbacks
-export([init/1]).

%% Helper macro for declaring children of supervisor
%% NOTE(review): ?CHILD does not appear to be used anywhere in this
%% file (init/1 builds map child specs) — confirm before removing.
-define(CHILD(I, Type), {I, {I, start_link, []}, permanent, 5000, Type, [I]}).
%% ===================================================================
%% API functions
%% ===================================================================

%% Creates/verifies the dperl job and service tables (the *Dyn tables
%% are node-local), registers the dashboard d3 templates with dderl and
%% finally starts the supervisor itself. Crashes (ok = ...) if any
%% table check fails.
start_link() ->
    ?Info("~p starting...~n", [?MODULE]),
    lists:map(
      fun({Table, ColumnNames, ColumnTypes, DefaultRecord, Opts}) ->
              ok = dperl_dal:check_table(
                     Table, ColumnNames, ColumnTypes, DefaultRecord, Opts
                    )
      end,
      [
       ?TABLESPEC(dperlJob,[]),
       ?TABLESPEC(
          ?JOBDYN_TABLE, dperlNodeJobDyn,
          [
           {scope,local},
           {local_content,true},
           {record_name,dperlNodeJobDyn}
          ]
         ),
       ?TABLESPEC(dperlService, []),
       ?TABLESPEC(
          ?SERVICEDYN_TABLE, dperlServiceDyn,
          [
           {scope,local},
           {local_content,true},
           {record_name,dperlServiceDyn}
          ]
         )
      ]
     ),
    ok = dderl:add_d3_templates_path(
           dderl, filename:join(priv_dir(), "dashboard_scripts")
          ),
    case supervisor:start_link({local, ?MODULE}, ?MODULE, []) of
        {ok,_} = Success ->
            ?Info("~p started!~n", [?MODULE]),
            Success;
        Error ->
            ?Error("~p failed to start ~p~n", [?MODULE, Error]),
            Error
    end.
%% ===================================================================
%% Supervisor callbacks
%% ===================================================================

%% one_for_one supervision of the metrics worker, the job/service
%% worker supervisors and their corresponding control processes.
init([]) ->
    {ok,
     {#{strategy => one_for_one, intensity => 5, period => 10},
      [#{id => dperl_metrics,
         start => {dperl_metrics, start_link, []},
         restart => permanent, shutdown => 5000, type => worker,
         modules => [dperl_metrics]},
       #{id => dperl_job_sup,
         start => {dperl_worker_sup, start_link, [job]},
         restart => permanent, shutdown => 60000, type => supervisor,
         modules => [dperl_worker_sup]},
       #{id => dperl_service_sup,
         start => {dperl_worker_sup, start_link, [service]},
         restart => permanent, shutdown => 60000, type => supervisor,
         modules => [dperl_worker_sup]},
       #{id => dperl_job_cp,
         start => {dperl_cp, start_link, [job]},
         restart => permanent, shutdown => 5000, type => worker,
         modules => [dperl_cp]},
       #{id => dperl_service_cp,
         start => {dperl_cp, start_link, [service]},
         restart => permanent, shutdown => 5000, type => worker,
         modules => [dperl_cp]}
      ]}}.

%% Location of the priv directory, falling back to "priv".
%% NOTE(review): code:priv_dir/1 expects an application name; passing
%% ?MODULE (dperl_sup) will normally hit the bad_name fallback —
%% confirm whether the application name was intended here.
priv_dir() ->
    case code:priv_dir(?MODULE) of
        {error, bad_name} -> "priv";
        PDir -> PDir
    end.
diff --git a/src/dperl_worker.erl b/src/dperl_worker.erl
new file mode 100644
index 00000000..1435bd54
--- /dev/null
+++ b/src/dperl_worker.erl
@@ -0,0 +1,308 @@
-module(dperl_worker).

-include("dperl.hrl").

-behavior(gen_server).

% behavior export
-export([behaviour_info/1]).

% interface export
-export([start_link/3, start/1, stop/2, list/1, child_spec/2, child_id/2,
         is_alive/2]).

% gen_server exports
-export([init/1, terminate/2, handle_call/3, handle_cast/2, handle_info/2,
         code_change/3, format_status/2]).

% NOTE(review): abbreviated field names; presumably n=name, m=module,
% ms=module state, js=job/service record — confirm against the rest of
% this module (not visible in this chunk).
-record(st, {n, m, ms, js}).

%% Dynamic behaviour declaration (pre '-callback' style): a dperl
%% worker module must export get_status/1 and init_state/1 on top of
%% the gen_server callbacks.
behaviour_info(callbacks) -> [{get_status,1}, {init_state,1}
                              | gen_server:behaviour_info(callbacks)];
behaviour_info(Type) -> gen_server:behaviour_info(Type).
+ +start(JobOrService) when is_record(JobOrService, dperlJob); + is_record(JobOrService, dperlService) -> + + Mod = ?G(JobOrService,module), + Name = ?G(JobOrService,name), + Behaviors = + lists:reverse( + lists:foldl( + fun({behavior,[Behavior]}, Acc) -> + case code:ensure_loaded(Behavior) == {module, Behavior} andalso + erlang:function_exported(Behavior, execute, 4) of + true -> [Behavior|Acc]; + _ -> Acc + end; + (_, Acc) -> Acc + end, [], Mod:module_info(attributes))), + Sup = ?SUP(JobOrService), + ChildId = {Mod, Name}, + ChildSpec = #{id => ChildId, restart => transient, shutdown => 30000, + type => worker, modules => [Mod], + start => {?MODULE, start_link, [JobOrService, Behaviors, Sup]}}, + case supervisor:get_childspec(Sup, ChildId) of + {ok, ChildSpec} -> supervisor:restart_child(Sup, ChildId); + {ok, _} -> + supervisor:terminate_child(Sup, ChildId), + supervisor:delete_child(Sup, ChildId), + start(JobOrService); + {error, not_found} -> + case supervisor:start_child(Sup, ChildSpec) of + {ok, _} = Result -> Result; + {ok, _, _} = Result -> Result; + {error, {Error, _Child}} -> + ?Error([{enum, dperl_dal:to_atom(Name)}], + "~p starting ~p_~s: ~p", [Sup, Mod, Name, Error], []), + error(Error); + {error, Error} -> + ?Error([{enum, dperl_dal:to_atom(Name)}], + "~p starting ~p_~s: ~p", [Sup, Mod, Name, Error], []), + error(Error) + end + end. + +stop(Mod, Name) -> + case get_supervisor(Mod, Name) of + not_found -> + {error, not_found}; + Sup -> + case supervisor:terminate_child(Sup, {Mod, Name}) of + ok -> + case supervisor:delete_child(Sup, {Mod, Name}) of + ok -> ok; + {error, Error} -> + ?Error("~p stop ~p_~s (delete_child) : ~p", + [Sup, Mod, Name, Error]), + {error, Error} + end; + {error, not_found} -> {error, not_found}; + {error, Error} -> + ?Error("~p stop ~p_~s (terminate_child) : ~p", + [Sup, Mod, Name, Error]), + error(Error) + end + end. 
+ +get_supervisor(Mod, Name) -> + case whereis(child_id(Mod, Name)) of + Pid when is_pid(Pid) -> + {dictionary, Dictionary} = erlang:process_info(Pid, dictionary), + case lists:keyfind(supervisor, 1, Dictionary) of + {supervisor, Sup} -> + Sup; + _ -> + not_found + end; + undefined -> + not_found + end. + +list(job) -> + list(whereis(dperl_job_sup)); +list(service) -> + list(whereis(dperl_service_sup)); +list(undefined) -> + []; +list(Pid) when is_pid(Pid) -> + case erlang:process_info(Pid, links) of + {links, [_]} -> []; + {links, Links} -> + lists:map( + fun(ChildPid) -> + {dictionary, Dictionary} = erlang:process_info(ChildPid, dictionary), + {name, Name} = lists:keyfind(name, 1, Dictionary), + {module, Mod} = lists:keyfind(module, 1, Dictionary), + {Mod, Name} + end, Links -- [whereis(dperl_sup)]); + _ -> [] + end. + +child_spec(job, Id) -> supervisor:get_childspec(dperl_job_sup, Id); +child_spec(service, Id) -> supervisor:get_childspec(dperl_service_sup, Id). + +is_alive(Type, Name) when Type == job; Type == service -> + lists:any( + fun({_, JobOrServiceName}) -> + Name == JobOrServiceName + end, list(Type) + ). + +start_link(JobOrService, Behaviors, Sup) + when is_record(JobOrService, dperlJob); + is_record(JobOrService, dperlService) -> + Mod = ?G(JobOrService,module), + Name = ?G(JobOrService,name), + Opts = ?G(JobOrService,opts), + Sup = ?SUP(JobOrService), + case gen_server:start_link({local, child_id(Mod, Name)}, ?MODULE, + {JobOrService, Behaviors, Sup}, + Opts) of + {ok, Pid} -> + ?Debug([{enum, dperl_dal:to_atom(Name)}], + "~p started ~p_~s: ~p", [Sup, Mod, Name, Pid]), + {ok, Pid}; + {error, Error} -> + ?Error([{enum, dperl_dal:to_atom(Name)}], + "~p starting ~p_~s: ~p", [Sup, Mod, Name, Error], []), + {error, Error}; + Other -> + ?Error([{enum, dperl_dal:to_atom(Name)}], + "~p starting ~p_~s: ~p", [Sup, Mod, Name, Other], []), + Other + end. + +-spec child_id(atom() | list(), list() | binary()) -> atom(). 
+child_id(M, C) when is_atom(M) -> child_id(atom_to_list(M), C); +child_id(M, C) when is_binary(C) -> child_id(M, binary_to_list(C)); +child_id(M, C) when is_list(M), is_list(C) -> list_to_atom(M++"_"++C). + +init({JobOrService, Behaviors, Sup}) + when is_record(JobOrService, dperlJob); + is_record(JobOrService, dperlService) -> + Mod = ?G(JobOrService,module), + Name = ?G(JobOrService,name), + Args = ?G(JobOrService,args), + %% to get the following keys in oci process add it explicitly in + %% dperl_dal:oci_opts + put(debug, maps:get(debug, Args, false)), + put(jstate, s), + put(module, Mod), + put(name, Name), + put(jname, dperl_dal:to_atom(Name)), + put(supervisor, Sup), + ?Debug("starting ~p_~s", [Mod, Name]), + process_flag(trap_exit, true), + Bhaves = [self() ! {internal, {behavior, B, Args}} || B <- Behaviors], + DefaultModState = Mod:init_state(dperl_dal:job_state(Name)), + Type = ?RC(JobOrService), + JobOrServiceSortedLinks = + case JobOrService of + #dperlJob{srcArgs = #{links := UnsortedSrcLinks} = SrcArgs, + dstArgs = #{links := UnsortedDstLinks} = DstArgs} -> + JobOrService#dperlJob{ + srcArgs = SrcArgs#{links => dperl_dal:sort_links(UnsortedSrcLinks)}, + dstArgs = DstArgs#{links => dperl_dal:sort_links(UnsortedDstLinks)}}; + #dperlJob{dstArgs = #{links := UnsortedLinks} = DstArgs} -> + JobOrService#dperlJob{dstArgs = DstArgs#{links => dperl_dal:sort_links(UnsortedLinks)}}; + #dperlJob{srcArgs = #{links := UnsortedLinks} = SrcArgs} -> + JobOrService#dperlJob{srcArgs = SrcArgs#{links => dperl_dal:sort_links(UnsortedLinks)}}; + _ -> JobOrService + end, + case catch Mod:init({JobOrServiceSortedLinks, DefaultModState}) of + {'EXIT', Reason} -> + {stop, {shutdown, Reason}}; + {ok, ModState} -> + update_state(Type, Name, Mod, Bhaves, ModState), + dperl_dal:set_running(JobOrService, true), + {ok, #st{n = Name, m = Mod, ms = ModState, js = JobOrService}}; + {ok, ModState, Timeout} -> + update_state(Type, Name, Mod, Bhaves, ModState), + 
dperl_dal:set_running(JobOrService, true), + {ok, #st{n = Name, m = Mod, ms = ModState, js = JobOrService}, Timeout}; + Error -> + dperl_dal:worker_error(Type, <<"init">>, <<"init">>, Error), + Error + end. + +handle_call({internal, Args}, From, St) -> handle_call_i(Args, From, St); +handle_call(Normal, From, #st{m = Mod, ms = ModState} = St) -> + case catch Mod:handle_call(Normal, From, ModState) of + {'EXIT', Reason} -> + {stop, {shutdown, Reason}, St}; + {reply, Reply, NewModState} -> + {reply, Reply, St#st{ms = NewModState}}; + {reply, Reply, NewModState, Timeout} -> + {reply,Reply,St#st{ms = NewModState},Timeout}; + {noreply, NewModState} -> + {noreply, St#st{ms = NewModState}}; + {noreply, NewModState, Timeout} -> + {noreply, St#st{ms = NewModState}, Timeout}; + {stop,Reason,Reply,NewModState} -> + {stop,Reason,Reply,St#st{ms = NewModState}}; + {stop, Reason, NewModState} -> + {stop, Reason, St#st{ms = NewModState}} + end. + +handle_cast({internal, Args}, St) -> handle_cast_i(Args, St); +handle_cast(Normal, #st{m = Mod, ms = ModState} = St) -> + case catch Mod:handle_cast(Normal, ModState) of + {'EXIT', Reason} -> + {stop, {shutdown, Reason}, St}; + {noreply, NewModState} -> + {noreply, St#st{ms = NewModState}}; + {noreply, NewModState, Timeout} -> + {noreply, St#st{ms = NewModState}, Timeout}; + {stop, Reason, NewModState} -> + {stop, Reason, St#st{ms = NewModState}} + end. + +handle_info({internal, Args}, St) -> handle_info_i(Args, St); +handle_info(Normal, #st{m = Mod, ms = ModState} = St) -> + case catch Mod:handle_info(Normal, ModState) of + {'EXIT', Reason} -> + {stop, {shutdown, Reason}, St}; + {noreply, NewModState} -> + {noreply, St#st{ms = NewModState}}; + {noreply, NewModState, Timeout} -> + {noreply, St#st{ms = NewModState}, Timeout}; + {stop, Reason, NewModState} -> + {stop, Reason, St#st{ms = NewModState}} + end. 
+ +code_change(OldVsn, #st{m = Mod, ms = ModState} = St, Extra) -> + case catch Mod:code_change(OldVsn, ModState, Extra) of + {'EXIT', Reason} -> + {error, Reason}; + {ok, NewModState} -> + {ok, St#st{ms = NewModState}}; + {error, Reason} -> + {error, Reason} + end. + +format_status(Opt, [PDict, #st{m = Mod, ms = ModState}]) -> + case catch Mod:format_status(Opt, [PDict, ModState]) of + {'EXIT', Reason} -> + ?JError("format_status ~p", [Reason]); + Result -> Result + end. + +terminate(Reason, #st{m = Mod, ms = ModState, n = Name, js = JobOrService}) -> + State = Mod:get_status(ModState), + case ?RC(JobOrService) of + job -> dperl_dal:update_job_dyn(Name, State, stopped); + service -> dperl_dal:update_service_dyn(Name, State, stopped) + end, + dperl_dal:set_running(JobOrService, false), + case catch Mod:terminate(Reason, ModState) of + {'EXIT', Reason} -> + ?JError("terminate ~p", [Reason]); + Result -> Result + end. + +handle_call_i(Args, _From, St) -> + ?JError("handle_call_i(~p)", [Args]), + {reply, ok, St}. + +handle_cast_i(Args, St) -> + ?JError("handle_cast_i(~p)", [Args]), + {noreply, St}. + +handle_info_i({behavior, BMod, Args}, St) -> + case catch BMod:execute(St#st.m, St#st.n, St#st.ms, Args) of + {'EXIT', Reason} -> {stop, {shutdown, Reason}, St}; + Ms -> {noreply, St#st{ms = Ms}} + end; +handle_info_i(Args, St) -> + ?JError("handle_info_i(~p)", [Args]), + {noreply, St}. + +update_state(job, Name, Mod, Bhaves, ModuleState) -> + dperl_dal:job_error_close(), + if length(Bhaves) < 1 -> + dperl_dal:update_job_dyn(Name, Mod:get_status(ModuleState), synced); + true -> + dperl_dal:update_job_dyn(Name, Mod:get_status(ModuleState), undefined) + end; +update_state(service, _Name, _Mod, _Behaves, _ModuleState) -> no_op. diff --git a/src/dperl_worker_sup.erl b/src/dperl_worker_sup.erl new file mode 100644 index 00000000..8c2e1f89 --- /dev/null +++ b/src/dperl_worker_sup.erl @@ -0,0 +1,31 @@ +-module(dperl_worker_sup). +-behaviour(supervisor). 
+ +-include("dperl.hrl"). + +%% Supervisor callbacks +-export([start_link/1, init/1]). + +%% =================================================================== +%% API functions +%% =================================================================== + +start_link(service) -> start_link(dperl_job_sup); +start_link(job) -> start_link(dperl_service_sup); +start_link(Ext) when Ext == dperl_job_sup; Ext == dperl_service_sup -> + ?Info("~p starting...~n", [Ext]), + case supervisor:start_link({local, Ext}, ?MODULE, [Ext]) of + {ok,_} = Success -> + ?Info("~p started!~n", [Ext]), + Success; + Error -> + ?Error("~p failed to start ~p~n", [Ext, Error]), + Error + end. + +%% =================================================================== +%% Supervisor callbacks +%% =================================================================== + +init([_Ext]) -> + {ok, { #{strategy => one_for_one, intensity => 5, period => 1}, []} }. From ea2f564f7b2cf89da95d9cb51a846a39d599eb18 Mon Sep 17 00:00:00 2001 From: Bikram Chatterjee Date: Tue, 14 Apr 2020 14:10:54 +0200 Subject: [PATCH 02/72] WIP cleanup and renames --- rebar.config | 5 + src/dperl_file_copy.erl | 791 ++++++++++++++++++ ...dperl_ora.erl => dperl_service_oracle.erl} | 4 +- ...dperl_ora.hrl => dperl_service_oracle.hrl} | 0 src/dperl_status_push.erl | 151 ---- src/ouraring_crawl.erl | 104 +++ test.escript | 28 + 7 files changed, 930 insertions(+), 153 deletions(-) create mode 100644 src/dperl_file_copy.erl rename src/{dperl_ora.erl => dperl_service_oracle.erl} (99%) rename src/{dperl_ora.hrl => dperl_service_oracle.hrl} (100%) delete mode 100644 src/dperl_status_push.erl create mode 100644 src/ouraring_crawl.erl create mode 100644 test.escript diff --git a/rebar.config b/rebar.config index 6893acfc..e47db9cc 100644 --- a/rebar.config +++ b/rebar.config @@ -45,6 +45,11 @@ {prometheus, "4.5.0"} ]}. +{erl_first_files, [ + "src/dperl_worker.erl", + "src/dperl_strategy_scr.erl" +]}. + {deps_error_on_conflict, false}. 
{dist_node, [ diff --git a/src/dperl_file_copy.erl b/src/dperl_file_copy.erl new file mode 100644 index 00000000..a8e2ba2a --- /dev/null +++ b/src/dperl_file_copy.erl @@ -0,0 +1,791 @@ +-module(dperl_file_copy). + +-include("dperl.hrl"). + +-behavior(dperl_worker). + +% dperl_job exports +-export([init/1, terminate/2, handle_call/3, handle_cast/2, handle_info/2, + get_status/1, init_state/1]). + +-ifndef(TEST). + +-define(SFTP_TIMEOUT, + ?GET_CONFIG(sfhSftpTimeout, [get(name)], 3000, + "Delay in millisecond before waiting for response from" + " sftp after which {error, timeout} will be returned") + ). + +-define(PICK_PROFILE_FUN(_Default), + ?GET_CONFIG( + pickProfileFun, [get(name)], + <>, + "SFH Transfer profile election function" + ) + ). + +-else. + +-define(SFTP_TIMEOUT, 3000). + +-define(PICK_PROFILE_FUN(_Default), <>). + +-endif. + +-define(SSH_DEFAULT_PORT, 22). + +-record(state, {name, src_ctx, file_info, src_root, srcs, dsts, src_conn, + dst_conn, src_connected = false, dst_connected = false, + status_path, status_extra, status_ctx, status_dir, dst_root, + dst_ctx, is_list_dir = true, src_file_handle, src_position = 0, + type, pick_profile_fun, pick_profile_fun_ctx}). + +%------------------------------------------------------------------------------- +% dperl_worker +%------------------------------------------------------------------------------- + +get_status(#state{}) -> #{}. + +init_state([]) -> #state{}; +init_state([#dperlNodeJobDyn{} | _]) -> #state{}; +init_state([_ | Others]) -> init_state(Others). 
+ +init({#dperlJob{name=Name, dstArgs = DstArgs, args = Args, + srcArgs = SrcArgs}, State}) -> + ?JInfo("Starting"), + PickProfileFunStr = ?PICK_PROFILE_FUN( + fun(ProfileList, _Context) -> + {Profile, _} = lists:nth( + rand:uniform(length(ProfileList)), + ProfileList + ), + NewContext = Profile, + {Profile, NewContext} + end + ), + ?JTrace("PickProfileFunStr ~s", [PickProfileFunStr]), + try imem_compiler:compile(PickProfileFunStr) of + PickProfileFun when is_function(PickProfileFun, 2) -> + case catch parse_args(Args, SrcArgs, DstArgs, State) of + {ok, #state{status_path = SPath, status_dir = SDir} = State1} -> + SDir1 = path_join([SPath, SDir]), + case connect_check_dir(SDir1, SDir1) of + {ok, SCtx} -> + dperl_dal:activity_logger(SCtx, SDir, + State1#state.status_extra), + self() ! execute, + {ok, + State1#state{ + name=Name, status_ctx = SCtx, + pick_profile_fun = PickProfileFun + }}; + {error, SErr} -> + ?JError("Status dir not accessible : ~p", [SErr]), + {stop, SErr} + end; + Error -> + ?JError("Invalid job parameters : ~p", [Error]), + {stop, badarg} + end + catch + _:Error -> + ?JError("Bad configuration pickProfileFun : ~p", [Error]), + {stop, Error} + end; +init({Args, _}) -> + ?JError("bad start parameters ~p", [Args]), + {stop, badarg}. + +handle_call(Request, _From, State) -> + ?JWarn("Unsupported handle_call ~p", [Request]), + {reply, ok, State}. + +handle_cast(Request, State) -> + ?JWarn("Unsupported handle_cast ~p", [Request]), + {noreply, State}. 
+ +handle_info(execute, #state{name = Job} = State) -> + try + {ok, State1} = connect_check_src(State), + case get_source_files(State1) of + {none, State2} -> + {ok, State2}; + {ok, State2} -> + {ok, State3} = connect_check_dst(State2), + {ok, State4} = open_tmp_files(State3), + process_files(State4) + end + of + {ok, #state{is_list_dir = true} = State5} -> + %% all the files in the state has been processed + dperl_dal:update_job_dyn(Job, #{}, idle), + dperl_dal:job_error_close(), + erlang:send_after(?CYCLE_IDLE_WAIT(?MODULE, Job), self(), execute), + {noreply, State5}; + {ok, State5} -> + dperl_dal:update_job_dyn(Job, #{}, synced), + dperl_dal:job_error_close(), + erlang:send_after(?CYCLE_ALWAYS_WAIT(?MODULE, Job), self(), execute), + {noreply, State5} + catch + error:{sfh_error, Error, State5} -> + ?JError("~p ~p step_failed~n~p", [?MODULE, Error, erlang:get_stacktrace()]), + dperl_dal:update_job_dyn(Job, #{}, error), + erlang:send_after(?CYCLE_ERROR_WAIT(?MODULE, Job), self(), execute), + dperl_dal:job_error(get(jstate), <<"step failed">>, Error), + {noreply, State5}; + Class:Error -> + ?JError("~p ~p ~p~n~p", [?MODULE, Class, Error, erlang:get_stacktrace()]), + dperl_dal:update_job_dyn(Job, error), + erlang:send_after(?CYCLE_ERROR_WAIT(?MODULE, Job), self(), execute), + dperl_dal:job_error(get(jstate), atom_to_binary(Class, utf8), Error), + {noreply, State} + end; +handle_info(Msg, State) -> + ?JWarn("Unhandled msg : ~p", [Msg]), + {noreply, State}. + +terminate(Reason, State) -> + check_close_conn(State#state.src_ctx, State#state.srcs), + check_close_conn(State#state.dst_ctx, State#state.dsts), + ?JInfo("terminate ~p", [Reason]). 
+ +%------------------------------------------------------------------------------- +% private +%------------------------------------------------------------------------------- + +process_files(#state{name = Job} = State) -> + case get_source_data(State, ?MAX_BULK_COUNT(?MODULE, Job)) of + {ok, Data, State1} -> + {ok, State2} = write_tmp(Data, State1), + process_files(State2); + {eof, Data, State1} -> + {ok, State2} = write_tmp(Data, State1), + {ok, State3} = rename_tmp_files(State2), + {ok, State3} + end. + +connect_check_src(#state{src_connected = true} = State) -> + dperl_dal:activity_logger(State#state.status_ctx, State#state.status_dir, + State#state.status_extra), + case check_dir(State#state.src_ctx, State#state.src_root) of + ok -> + {ok, State}; + {error, Error} -> + error({sfh_error, Error, State#state{src_connected = false}}) + end; +connect_check_src(#state{src_root = Root, src_conn = Conn} = State) -> + case connect_check_dir(Conn, Root) of + {ok, Ctx} -> + ?JInfo("Connected to source"), + {ok, State#state{src_ctx = Ctx, src_connected = true}}; + {error, Error} -> + ?JError("Connecting to src : ~p", [Error]), + error({sfh_error, Error}) + end. + +connect_check_dst(#state{dst_connected = true} = State) -> + case check_dir(State#state.dst_ctx, State#state.dst_root) of + ok -> + {ok, State}; + {error, Error} -> + error({sfh_error, Error, + check_error(Error, dst, State#state{dst_connected = false})}) + end; +connect_check_dst(#state{dst_conn = Conn, dst_root = Root} = State) -> + case connect_check_dir(Conn, Root) of + {ok, Ctx} -> + ?JInfo("Connected to destination"), + {ok, State#state{dst_ctx = Ctx, dst_connected = true}}; + {error, Error} -> + ?JError("Connecting to dst : ~p", [Error]), + error({sfh_error, Error, check_error(Error, dst, State)}) + end. 
+ +get_source_files(#state{is_list_dir = true, srcs = Srcs} = State) -> + Fun = fun(K, #{path := Path, match := Match} = Src, Ctx, STimeout) -> + case imem_file:list_dir(Ctx, Path, STimeout) of + {error, Error} -> + ?JError("Listing dir ~p ~s failed : ~p", [K, Path, Error]), + {error, Error}; + {ok, Files} -> + ?JTrace("list dir ~p ~s result : ~p", [K, Path, Files]), + {ok, Src#{files => filter_files(Files, Match), + matched => false}} + end + end, + case maps_map(Fun, [State#state.src_ctx, ?SFTP_TIMEOUT], Srcs) of + {error, Error, Srcs1} -> + error({sfh_error, Error, is_conn_closed(Error, src, State#state{srcs = Srcs1})}); + {ok, Srcs1} -> + get_source_files(State#state{srcs = Srcs1, is_list_dir = false}) + end; +get_source_files(#state{file_info = undefined, srcs = Srcs} = State) -> + case maps:fold(fun(_Profile, #{files := []}, Acc) -> Acc; + (Profile, #{files := Files}, Acc) -> + [{Profile, Files} | Acc] + end, [], Srcs) of + [] -> + ?JTrace("No matches found"), + {none, State#state{is_list_dir = true}}; + ProfileList -> + {K, NewProfileFunCtx} = + (State#state.pick_profile_fun)( + ProfileList, State#state.pick_profile_fun_ctx + ), + #{K := #{path := Path, files := [File|Files]} = Src} = Srcs, + FileInfo = {K, Path, File}, + ?JTrace("File match found : ~p", [FileInfo]), + Srcs1 = Srcs#{K => Src#{files => Files}}, + case is_file_in_hist(File, K, State) of + true -> + ?JTrace("Skipping file ~p, since its already in hst file", [FileInfo]), + get_source_files( + State#state{srcs = Srcs1, + pick_profile_fun_ctx = NewProfileFunCtx}); + false -> + {ok, + State#state{ + file_info = FileInfo, srcs = Srcs1, + pick_profile_fun_ctx = NewProfileFunCtx + }} + end + end. 
+ +get_source_data(#state{file_info = {_, Path, File}, src_file_handle = undefined} = State, BulkSize) -> + case imem_file:open(State#state.src_ctx, path_join([Path, File]), [read], ?SFTP_TIMEOUT) of + {ok, FileHandle} -> + ?JTrace("Opened src file ~s", [File]), + get_source_data(State#state{src_file_handle = FileHandle}, BulkSize); + {error, Error} -> + ?JError("Opening file ~s error : ~p path : ~p", [File, Error, Path]), + error({sfh_error, Error, check_error(Error, src, State)}) + end; +get_source_data(#state{src_file_handle = FileHandle, src_ctx = Ctx} = State, BulkSize) -> + case read(Ctx, FileHandle, State#state.src_position, BulkSize * 1024) of + {ok, Data, Position} -> + {ok, Data, State#state{src_position = Position}}; + {eof, Data} -> + ?JTrace("End of file ~p reached", [State#state.file_info]), + {eof, Data, State#state{src_file_handle = close_file(Ctx, FileHandle)}}; + {error, Error} -> + ?JError("Reading ~p : ~p", [State#state.file_info, Error]), + error({sfh_error, Error, check_error(Error, src, State)}) + end. + +open_tmp_files(#state{type = multipleOneToOne, file_info = {K, _, File}, + dsts = Dsts, dst_ctx = DCtx} = State) -> + #{K := Dst} = Dsts, + case open_tmp_file(K, Dst, DCtx, tmp_file_name(File), ?SFTP_TIMEOUT) of + {error, Error} -> + error({sfh_error, Error, check_error(Error, dst, State)}); + {ok, Dst1} -> + {ok, State#state{dsts = Dsts#{K => Dst1}}} + end; +open_tmp_files(#state{file_info = {_, _, File}, dsts = Dsts} = State) -> + case maps_map(fun open_tmp_file/5, [State#state.dst_ctx, + tmp_file_name(File), ?SFTP_TIMEOUT], Dsts) of + {error, Error, Dsts1} -> + error({sfh_error, Error, check_error(Error, dst, State#state{dsts = Dsts1})}); + {ok, Dsts1} -> + {ok, State#state{dsts = Dsts1}} + end. 
+ +write_tmp(_, #state{file_info = undefined} = State) -> + ?JError("Src file already closed"), + {ok, State}; +write_tmp(<<>>, State) -> {ok, State}; +% N to N copy - srcs and dsts names should match to copy +write_tmp(Data, #state{type = multipleOneToOne, file_info = {K, _, File}, + dsts = Dsts, dst_ctx = DCtx} = State) -> + #{K := Dst} = Dsts, + case write_tmp(K, Dst, DCtx, Data, File, ?SFTP_TIMEOUT) of + {error, Error} -> + error({sfh_error, Error, check_error(Error, dst, State)}); + {ok, Dst1} -> + {ok, State#state{dsts = Dsts#{K => Dst1}}} + end; +% 1 to 1 and 1 to N +write_tmp(Data, #state{file_info = {_, _, File}, dsts = Dsts, + dst_ctx = DCtx} = State) -> + case maps_map(fun write_tmp/6, [DCtx, Data, File, ?SFTP_TIMEOUT], Dsts) of + {error, Error, Dsts1} -> + error({sfh_error, Error, check_error(Error, dst, State#state{dsts = Dsts1})}); + {ok, Dsts1} -> + {ok, State#state{dsts = Dsts1}} + end. + +check_dir(Ctx, Path) -> + case imem_file:is_dir(Ctx, Path, ?SFTP_TIMEOUT) of + false -> + imem_file:disconnect(Ctx), + {error, Path ++ " dir not found"}; + true -> ok + end. + +close_file(Ctx, #{file_handle := FileHandle}) -> close_file(Ctx, FileHandle); +close_file(_Ctx, undefined) -> undefined; +close_file(Ctx, FileHandle) -> + catch imem_file:close(Ctx, FileHandle, ?SFTP_TIMEOUT), + undefined. + +check_args(Srcs, Dsts, State) -> + case check_arg_counts(lists:sort(maps:keys(Srcs)), lists:sort(maps:keys(Dsts))) of + invalid -> + {error, "Src and Dst args does not match"}; + Type -> + {ok, State#state{dsts = Dsts, srcs = Srcs, type = Type}} + end. 
+ +check_arg_counts(S, D) when length(S) == 1, length(D) == 1 -> + ?JTrace("Single source and single destination conf"), + oneToOne; +check_arg_counts(S, D) when length(S) == 1 -> + ?JTrace("Single source and multiple destination (~p) conf", [length(D)]), + oneToMany; +check_arg_counts(S, D) when length(D) == 1 -> + ?JTrace("Multiple source (~p) and single destination conf", [length(S)]), + manyToOne; +check_arg_counts(N, N) -> + ?JTrace("Multiple source (~p) and multiple destination (~p) conf", [length(N), length(N)]), + multipleOneToOne; +check_arg_counts(S, D) -> + ?JError("Source (~p) and Destination (~p) conf mismatch", [S, D]), + invalid. + +parse_args(#{status_path := StatusPath, status_extra := Extra, + status_dir := SName}, SrcArg, DstArg, State) -> + case parse_args(SrcArg) of + {ok, #{root := SrcRoot} = SrcConn, Srcs} -> + case parse_args(DstArg) of + {ok, #{root := DstRoot} = DstConn, Dsts} -> + State1 = State#state{status_path = StatusPath, + status_extra = Extra, + status_dir = SName, src_root = SrcRoot, + dst_root = DstRoot, src_conn = SrcConn, + dst_conn = DstConn}, + check_args(Srcs, Dsts, State1); + Error -> Error + end; + Error -> Error + end. + +parse_args(#{default := _} = Args) -> + {Default, Rest} = maps:take(default, Args), + Default1 = maps:merge(#{mask => "", path => "", + backup => undefined, tmp_dir => undefined}, + Default), + case process_default(Default1) of + {ok, Default2} -> + {ok, Conf} = parse_args_rest(Default2, Rest), + {ok, Default2, Conf}; + {error, _} = Error -> + Error + end. 
+ +process_default(#{proto := cifs, root := _} = Default) -> {ok, Default}; +process_default(#{proto := sftp, host := _, user := _, password := _, + root := _} = Default) -> + {ok, Default#{opts => maps:get(opts, Default, []), + port => maps:get(port, Default, ?SSH_DEFAULT_PORT)}}; +process_default(#{proto := sftp, host := _, user := _, key := _, + root := _} = Default) -> + {ok, Default#{port => maps:get(port, Default, ?SSH_DEFAULT_PORT), + opts => maps:get(opts, Default, [])}}; +process_default(Default) -> + ?JError("Invalid Default : ~p", [Default]), + {error, badarg}. + +parse_args_rest(#{mask := Mask, path := Path, backup := Backup, + tmp_dir := TmpDir}, Rest) when map_size(Rest) == 0 -> + HistFile = <<(get(name))/binary, ".hst">>, + {ok, #{"1" => #{mask => Mask, path => Path, backup => Backup, + match => transform_mask(Mask), hist_file => HistFile, + tmp_dir => TmpDir}}}; +parse_args_rest(#{mask := Mask, path := Path, backup := Backup, + tmp_dir := TmpDir}, Rest) -> + DefaultConf = #{mask => Mask, path => Path, backup => Backup, + tmp_dir => TmpDir}, + Confs = maps:map( + fun(K, V) -> + #{mask := Mask1} = V1 = maps:merge(DefaultConf, V), + KBin = + if not (is_list(K) orelse is_binary(K)) -> + list_to_binary(io_lib:format("~p", [K])); + is_list(K) -> list_to_binary(K); + true -> K + end, + HistFile = <<(get(name))/binary, "(", KBin/binary, ").hst">>, + V1#{match => transform_mask(Mask1), hist_file => HistFile} + end, Rest), + {ok, Confs}. + +tmp_file_name(File) -> "tmp_" ++ File. + +transform_mask(Mask) -> + %% replace all `?` with . + Mask1 = re:replace(Mask, "[?]", ".", [global, {return, binary}]), + %% move all hashes into parenthesis + Mask2 = re:replace(Mask1, "[#]+", "(&)", [global, {return, binary}]), + %% replace all hashes with \\d to match integers + Mask3 = re:replace(Mask2, "[#]", "\\\\d", [global, {return, binary}]), + %% make it a full line match + <<"^", Mask3/binary, "$">>. 
+ +read(Ctx, FileHandle, Position, Size) -> + read(Ctx, FileHandle, Position, Size, <<>>). + +read(Ctx, FileHandle, Position, Size, Acc) -> + case imem_file:pread(Ctx, FileHandle, Position, Size, ?SFTP_TIMEOUT) of + {ok, Data} when size(Data) < Size -> + DataSize = size(Data), + NewPosition = Position + DataSize, + NewLen = Size - DataSize, + read(Ctx, FileHandle, NewPosition, NewLen, + <>); + {ok, Data} -> {ok, <>, Position + Size}; + eof -> {eof, Acc}; + {error, Error} -> {error, Error} + end. + +rename_tmp_files(#state{type = multipleOneToOne, file_info = {K, _, File}, dsts = Dsts, + dst_ctx = DCtx, dst_conn = #{root := DstRoot}} = State) -> + #{K := #{mask := SrcMask}} = State#state.srcs, + #{K := Dst} = Dsts, + case rename_tmp_file(K, Dst, DCtx, SrcMask, DstRoot, File, ?SFTP_TIMEOUT) of + {error, Error} -> + error({sfh_error, Error, check_error(Error, dst, State)}); + {ok, Dst1} -> + add_to_hist(State), + delete_or_backup_src_file(State#state{dsts = Dsts#{K => Dst1}}) + end; +rename_tmp_files(#state{file_info = {K, _, File}, dsts = Dsts, dst_ctx = DCtx, + dst_conn = #{root := DstRoot}} = State) -> + #{K := #{mask := SrcMask}} = State#state.srcs, + case maps_map(fun rename_tmp_file/7, + [DCtx, SrcMask, DstRoot, File, ?SFTP_TIMEOUT], Dsts) of + {error, Error, Dsts1} -> + error({sfh_error, Error, check_error(Error, dst, State#state{dsts = Dsts1})}); + {ok, Dsts1} -> + add_to_hist(State), + delete_or_backup_src_file(State#state{dsts = Dsts1}) + end. + +delete_or_backup_src_file(#state{file_info = {K, _Path, _File}} = State) -> + case maps:get(K, State#state.srcs) of + #{backup := undefined} -> + delete_src_file(State); + #{backup := BackupDir} -> + backup_src_file(BackupDir, State) + end. 
+ +delete_src_file(#state{file_info = {_, Path, File}} = State) -> + case imem_file:delete(State#state.src_ctx, path_join([Path, File]), ?SFTP_TIMEOUT) of + ok -> + ?JInfo("Deleted file : ~s", [File]), + {ok, State#state{file_info = undefined, src_position = 0}}; + {error, Error} -> + ?JError("Deleting file ~s : ~p", [File, Error]), + error({sfh_error, Error, check_error(Error, src, State)}) + end. + +backup_src_file(BackupDir, #state{file_info = {_, Path, File}, src_ctx = Ctx} = State) -> + FilePath = path_join([State#state.src_root, Path, File]), + BackupPath = path_join([State#state.src_root, BackupDir, File]), + case imem_file:rename(Ctx, FilePath, BackupPath, ?SFTP_TIMEOUT) of + ok -> + ?JInfo("Backed up file : ~s", [File]), + {ok, State#state{file_info = undefined, src_position = 0}}; + {error, Error} -> + ?JError("Backing up file ~s failed : ~p", [BackupPath, Error]), + error({sfh_error, Error, check_error(Error, src, State)}) + end. + +check_error(Err, Type, #state{src_ctx = SCtx, src_file_handle = SFileHandle, + dsts = Dsts, dst_ctx = DCtx} = State) -> + Dsts1 = maps:map( + fun(_K, V) -> + V#{file_handle => close_file(DCtx, V)} + end, Dsts), + is_conn_closed(Err, Type, + State#state{file_info = undefined, src_position = 0, + src_file_handle = close_file(SCtx, SFileHandle), + dsts = Dsts1}). + +is_conn_closed(closed, src, State) -> + Srcs = check_close_conn(State#state.src_ctx, State#state.srcs), + State#state{src_connected = false, srcs = Srcs}; +is_conn_closed(closed, dst, State) -> + Dsts = check_close_conn(State#state.dst_ctx, State#state.dsts), + State#state{dst_connected = false, dsts = Dsts}; +is_conn_closed(_Err, _Type, State) -> State. + +check_close_conn(Ctx, Confs) -> + Conf1 = maps:map( + fun(_K, M) -> + close_file(Ctx, M), + M#{file_handle => undefined, files => []} + end, Confs), + imem_file:disconnect(Ctx), + Conf1. 
+ +connect_check_dir(Conn, Path) -> + Conn1 = case is_map(Conn) of + true -> Conn#{path => Path}; + false -> Conn + end, + case imem_file:connect(Conn1) of + {error, _} = Error -> Error; + Ctx -> + case check_dir(Ctx, Path) of + {error, _} = Err -> Err; + ok -> {ok, Ctx} + end + end. + +maps_map(Fun, Args, Map) when is_map(Map) -> + maps_map(Fun, Args, maps:keys(Map), Map). + +maps_map(_Fun, _Args, [], Map) -> {ok, Map}; +maps_map(Fun, Args, [K | Keys], Map) -> + case apply(Fun, [K, maps:get(K, Map) | Args]) of + {ok, V} -> + maps_map(Fun, Args, Keys, Map#{K => V}); + {error, Error} -> + {error, Error, Map} + end. + +get_tmp_path(#{path := Path, tmp_dir := undefined}) -> Path; +get_tmp_path(#{tmp_dir := TmpDir}) -> TmpDir. + +open_tmp_file(K, Dst, Ctx, File, Timeout) -> + TmpFilePath = path_join([get_tmp_path(Dst), File]), + case imem_file:open(Ctx, TmpFilePath, [write], Timeout) of + {error, Error} -> + ?JError("Opening ~s error : ~p", [File, Error]), + {error, Error}; + {ok, FileHandle} -> + ?JTrace("Opened dst file ~s to write for conn : ~p", [File, K]), + {ok, Dst#{file_handle => FileHandle}} + end. + +write_tmp(K, #{file_handle := FileHandle} = Dst, Ctx, Data, File, STimeout) -> + case imem_file:write(Ctx, FileHandle, Data, STimeout) of + {error, Error} -> + ?JError("Writing tmp(~p) file error : ~p", [K, Error]), + {error, Error}; + ok -> + ?JTrace("Copied ~p kb from ~s", [byte_size(Data) / 1024, File]), + {ok, Dst} + end. 
+ +rename_tmp_file(K, #{file_handle := FileHandle, mask := DMask, path := Path} = Dst, Ctx, + SMask, DRoot, SFile, Timeout) -> + imem_file:close(Ctx, FileHandle, Timeout), + DstFile = dstFileName(SFile, SMask, DMask), + TmpFile = tmp_file_name(SFile), + TmpPath = path_join([DRoot, get_tmp_path(Dst), TmpFile]), + NewPath = path_join([DRoot, Path, DstFile]), + case imem_file:rename(Ctx, TmpPath, NewPath, Timeout) of + ok -> + ?JTrace("Renamed ~s to ~s for ~p", [TmpFile, DstFile, K]), + {ok, Dst#{file_handle => undefined}}; + {error, Error} -> + ?JError("Renaming ~s error : ~p", [TmpFile, Error]), + {error, Error} + end. + +is_file_in_hist(File, Key, State) -> + #{Key := #{hist_file := HistFile}} = State#state.srcs, + case imem_file:read_file(State#state.status_ctx, HistFile, ?SFTP_TIMEOUT) of + {error, Error} -> + ?JError("Reading hist file error : ~p", [Error]), + false; + {ok, Hist} -> + lists:member(list_to_binary(File), re:split(Hist, "\n")) + end. + +add_to_hist(#state{file_info={K, _, File}, srcs = Srcs, status_ctx = SCtx}) -> + #{K := #{hist_file := HistFile}} = Srcs, + Timeout = ?SFTP_TIMEOUT, + Data = [list_to_binary(File), <<"\n">>], + case imem_file:read_file(SCtx, HistFile, Timeout) of + {error, Error} -> + ?JError("Reading hist file ~s error : ~p", [HistFile, Error]), + imem_file:write_file(SCtx, HistFile, Data, Timeout); + {ok, Hist} -> + Lines = re:split(string:trim(Hist), "\n", [{return, list}]), + Lines1 = + case length(Lines) of + L when L < 100 -> + Lines ++ [File]; + L -> + {_, List2} = lists:split(L - 99, Lines), + List2 ++ [File] + end, + imem_file:write_file(SCtx, HistFile, string:join(Lines1, "\n"), Timeout) + end, + ?JTrace("Filename ~s written to hst file ~s", [File, HistFile]). + +filter_files([], _) -> []; +filter_files([File | Rest], Match) -> + lists:sort( + case re:run(File, Match, [{capture, all_but_first, binary}]) of + {match, _} -> [File | filter_files(Rest, Match)]; + nomatch -> filter_files(Rest, Match) + end + ). 
+ +path_join([[], File]) -> File; +path_join(Paths) -> filename:join(Paths). + +%% ----------------------------------------------------------------------------- +%% dstFileName +%% ----------------------------------------------------------------------------- + +dstFileName(SrcFN, _, "") -> SrcFN; +dstFileName(SrcFN, _, "*") -> SrcFN; +dstFileName(SrcFN, _, ".*") -> SrcFN; +dstFileName(SrcFN, _, "*.*") -> SrcFN; +dstFileName(SrcFN, SrcFM, DstFM) + when is_list(SrcFN), is_list(SrcFM), is_list(DstFM) -> + NewDstFM = hash(SrcFN, SrcFM, DstFM), + dstFN(SrcFN, NewDstFM, []). + +dstFN(_SrcFN, [], DstFile) -> lists:reverse(DstFile); +dstFN(SrcFN, [$<|DstFM], DstFile) -> + dstFN(SrcFN, dt(DstFM), DstFile); +dstFN([FC|SrcFN], [C|DstFM], DstFile) when C == $?; C == $# -> + dstFN(SrcFN, DstFM, [FC|DstFile]); +dstFN([_|SrcFN], [C|DstFM], DstFile) -> + dstFN(SrcFN, DstFM, [C|DstFile]); +dstFN([], [C|DstFM], DstFile) -> + dstFN([], DstFM, [C|DstFile]). + +dt(DstFM) -> + {{Year, Month, Day}, {Hour, Minute, Second}} + = calendar:now_to_local_time(os:timestamp()), + dt( + DstFM, + {io_lib:format("~4..0B", [Year]), io_lib:format("~2..0B", [Month]), + io_lib:format("~2..0B", [Day])}, + {io_lib:format("~2..0B", [Hour]), io_lib:format("~2..0B", [Minute]), + io_lib:format("~2..0B", [Second])}, + [] + ). + +dt([$>|DstFM], _, _, Buf) -> lists:flatten([Buf, DstFM]); +dt([$M,$M|DstFM], {_,M,_} = Dt, T, Buf) -> dt(DstFM, Dt, T, Buf ++ M); +dt([$D,$D|DstFM], {_,_,D} = Dt, T, Buf) -> dt(DstFM, Dt, T, Buf ++ D); +dt([$h,$h|DstFM], Dt, {H,_,_} = T, Buf) -> dt(DstFM, Dt, T, Buf ++ H); +dt([$m,$m|DstFM], Dt, {_,M,_} = T, Buf) -> dt(DstFM, Dt, T, Buf ++ M); +dt([$s,$s|DstFM], Dt, {_,_,S} = T, Buf) -> dt(DstFM, Dt, T, Buf ++ S); +dt([$Y,$Y,$Y,$Y|DstFM], {Y,_,_} = Dt, T, Buf) -> dt(DstFM, Dt, T, Buf ++ Y). + +hash(_SrcFN, _SrcFM, DstFM) -> DstFM. 
+ +%% ----------------------------------------------------------------------------- +%% TESTS +%% ----------------------------------------------------------------------------- + +-ifdef(TEST). + +-include_lib("eunit/include/eunit.hrl"). + +all_test_() -> + Ctx = #{proto => local, path => "test"}, + {inparallel, + [ {"init bad arg test1", ?_assertEqual({stop, badarg}, init({#dperlJob{name = <<"test">>}, test}))} + , {"init bad arg test2", ?_assertEqual({stop, badarg}, init({test, test}))} + , {"init state", ?_assertEqual(#state{}, init_state([5, #dperlNodeJobDyn{state = #{}}]))} + , {"handle_call", ?_assertEqual({reply, ok, st}, handle_call(req, self(), st))} + , {"handle_cast", ?_assertEqual({noreply, st}, handle_cast(req, st))} + , {"close_file", ?_assertEqual(undefined, close_file(ctx, undefined))} + , {"oneToOne", ?_assertEqual(oneToOne, check_arg_counts(["default"], ["test"]))} + , {"oneToMany", ?_assertEqual(oneToMany, check_arg_counts(["1"], ["test1", "test2"]))} + , {"manyToOne", ?_assertEqual(manyToOne, check_arg_counts(["test1","test2"], ["test"]))} + , {"multipleOneToOne", ?_assertEqual(multipleOneToOne, check_arg_counts(["1", "2"], ["1", "2"]))} + , {"invalid1", ?_assertEqual(invalid, check_arg_counts(["1", "2"], ["1", "2", "3"]))} + , {"invalid2", ?_assertEqual(invalid, check_arg_counts(["1", "2"], ["2", "1"]))} + , {"check args", ?_assertEqual({error, "Src and Dst args does not match"}, check_args(#{a => 1, b => 2}, #{c => 2, a => 1}, #state{}))} + , {"check dir invalid", ?_assertEqual({error, badarg}, connect_check_dir("test", "test"))} + , {"check dir invalid path", ?_assertMatch({error, _}, connect_check_dir("/home", "test"))} + , {"parse args invalid default", ?_assertEqual({error, badarg}, parse_args(#{status_dir => s, status_path => s, status_extra => e}, #{default => #{}}, d, s))} + , {"parse default sftp1", ?_assertMatch({ok, _}, process_default(#{proto => sftp, host => h, root => r, user => u, key => k}))} + , {"parse default sftp2", 
?_assertMatch({ok, _}, process_default(#{proto => sftp, host => h, root => r, user => u, password => p}))} + , {"connect check src error", ?_assertMatch({_, {{sfh_error, _}, _}}, catch connect_check_src(#state{src_root = "test", src_conn = #{proto => local}}))} + , {"connect check dst error", ?_assertMatch({_, {{sfh_error, _, _}, _}}, catch connect_check_dst(#state{dst_root = "test", dst_conn = #{proto => local}, dsts = #{}}))} + , {"connect check src conn error", ?_assertMatch({_, {{sfh_error, _, _}, _}}, catch connect_check_src(#state{src_root = "test", src_connected = true, src_ctx = Ctx}))} + , {"connect check dst conn error", ?_assertMatch({_, {{sfh_error, _, _}, _}}, catch connect_check_dst(#state{dst_root = "test", dst_connected = true, dst_ctx = Ctx, dsts = #{}}))} + , {"unhandled info", ?_assertEqual({noreply, test}, handle_info(test, test))} + , {"write dst with file closed", ?_assertMatch({ok, _}, write_tmp(test, #state{file_info = undefined}))} + ] + }. + +parse_args_invalid_dst_test() -> + put(name, <<"test">>), + ?assertEqual({error, badarg}, parse_args(#{status_dir => s, status_path => s, status_extra => e}, #{default => #{proto => cifs, root => r, mask => ""}}, #{default => #{}}, s)). + +init_error_test() -> + put(name, <<"test">>), + ?assertEqual({stop, badarg}, + init({#dperlJob{name = <<"test">>, args = #{status_dir => "test", status_path => "config", status_extra => "e"}, + dstArgs = #{default => #{proto => cifs, root => r, mask => ""}}, + srcArgs = #{default => #{proto => cifs, root => r, mask => ""}}}, #state{}})). 
+ +dstFileName_test_() -> + {inparallel, [ + {T, ?_assertEqual(DF, dstFileName(SF, SM, DM))} + || {T, SF, SM, DM, DF} <- + [ + {"no change", "ABCD1234.txt", "ABCD????.txt", "", "ABCD1234.txt"}, + {"no change *", "ABCD1234.txt", "ABCD????.txt", "*", "ABCD1234.txt"}, + {"no change .*", "ABCD1234.txt", "ABCD????.txt", ".*", "ABCD1234.txt"}, + {"no change *.*", "ABCD1234.txt", "ABCD????.txt", "*.*", "ABCD1234.txt"}, + {"reduce", "ABCD1234.txt", "ABCD????.txt", "????12.txt", "ABCD12.txt"}, + {"remove_extn", "ABCD1234.txt", "ABCD????.txt", "????12", "ABCD12"}, + {"reduce_replace_extn", "ABCD1234.txt", "ABCD????.txt", "????12.csv", "ABCD12.csv"}, + {"extend", "ABCD1234.txt", "ABCD????.txt", "?????SomthingElse", "ABCD1SomthingElse"}, + {"date", "SMCH80-1234-abcdefgh.ascii", "SMCH90-????-########.ascii", + "T080_SBS_SMS_SMSC.csv", "T2020080_SBS_SMS_SMSC.csv"}, + {"MMSC", "F-miopoltmmstn00-20190127234100.123.dat", "F-miop??????????-??????????????.???.dat", + "F-miop??????????-??????????????","F-miopoltmmstn00-20190127234100"}, + {"Hotbill", "HB-SMCH80-abcd-12345678-RAP.xml", "HB-SMCH80-????-########-RAP.xml", + "HB-SMCH80-????-########.xml", "HB-SMCH80-abcd-12345678.xml"} + ] + ] + }. 
+ +dt_test_() -> + {{Y, M, D}, {H, Min, S}} + = calendar:now_to_local_time(os:timestamp()), + Year = lists:flatten(io_lib:format("~4..0B", [Y])), + Month = lists:flatten(io_lib:format("~2..0B", [M])), + Day = lists:flatten(io_lib:format("~2..0B", [D])), + Date = Year ++ Month ++ Day, + Hour = lists:flatten(io_lib:format("~2..0B", [H])), + Minute = lists:flatten(io_lib:format("~2..0B", [Min])), + Second = lists:flatten(io_lib:format("~2..0B", [S])), + Time = Hour ++ Minute ++ Second, + {inparallel, [ + {T, ?_assertEqual(E, dt(DM, {Year, Month, Day}, {Hour, Minute, Second}, []))} + || {T, DM, E} <- + [ + {"all", "YYYYMMDDhhmmss>", Date ++ Time}, + {"date", "YYYYMMDD>", Date}, + {"time", "hhmmss>", Time}, + {"year", "YYYY>", Year}, + {"month", "MM>", Month}, + {"day", "DD>", Day}, + {"hour", "hh>", Hour}, + {"minute", "mm>", Minute}, + {"second", "ss>", Second}, + {"year_month", "YYYYMM>", Year ++ Month}, + {"month_day", "MMDD>", Month ++ Day}, + {"hour_minute", "hhmm>", Hour ++ Minute}, + {"minute_second", "mmss>", Minute ++ Second} + ] + ] + }. + +-endif. diff --git a/src/dperl_ora.erl b/src/dperl_service_oracle.erl similarity index 99% rename from src/dperl_ora.erl rename to src/dperl_service_oracle.erl index 0936d039..e04de62a 100644 --- a/src/dperl_ora.erl +++ b/src/dperl_service_oracle.erl @@ -1,6 +1,6 @@ --module(dperl_ora). +-module(dperl_service_oracle). --include("dperl_ora.hrl"). +-include("dperl_service_oracle.hrl"). -behavior(dperl_worker). -behavior(cowboy_middleware). diff --git a/src/dperl_ora.hrl b/src/dperl_service_oracle.hrl similarity index 100% rename from src/dperl_ora.hrl rename to src/dperl_service_oracle.hrl diff --git a/src/dperl_status_push.erl b/src/dperl_status_push.erl deleted file mode 100644 index 7f6ca8de..00000000 --- a/src/dperl_status_push.erl +++ /dev/null @@ -1,151 +0,0 @@ --module(dperl_status_push). - --include("dperl.hrl"). - --behavior(dperl_worker). --behavior(dperl_strategy_scr). 
- -% dperl_worker exports --export([init/1, terminate/2, handle_call/3, handle_cast/2, handle_info/2, - code_change/3, format_status/2, get_status/1, init_state/1]). - --record(state, {cred, links, imem_sess, name, channel, first_sync = true, - audit_start_time = {0,0}, chunk_size = 200, provs = [], - active_link = 1, mod, func, imem_connected = false}). - -% dperl_strategy_scr export --export([connect_check_src/1, get_source_events/2, connect_check_dst/1, - do_cleanup/2, do_refresh/2, fetch_src/2, fetch_dst/2, delete_dst/2, - insert_dst/3, update_dst/3, report_status/3, is_equal/4]). - -get_source_events(#state{channel = Channel, - audit_start_time = LastStartTime} = State, BulkSize) -> - case length(State#state.provs) > 0 of - true -> {ok, State#state.provs, State#state{provs = []}}; - _ -> - case dperl_dal:read_audit(Channel, LastStartTime, BulkSize) of - {LastStartTime, LastStartTime, []} -> - if State#state.first_sync == true -> - ?JInfo("Audit rollup is complete"), - {ok, sync_complete, State#state{first_sync = false}}; - true -> {ok, sync_complete, State} - end; - {_StartTime, NextStartTime, []} -> - {ok, [], State#state{audit_start_time = NextStartTime}}; - {_StartTime, NextStartTime, Statuses} -> - {ok, [K || #{key := K, nval := NV} <- Statuses, NV /= undefined], - State#state{audit_start_time = NextStartTime}} - end - end. - -connect_check_src(State) -> {ok, State}. - -connect_check_dst(State) -> ?CONNECT_CHECK_IMEM_LINK(State). - -do_cleanup(_, _) -> error(sync_only_job). - -do_refresh(_, _) -> error(sync_only_job). - -fetch_src(Key, #state{channel = Channel}) -> - dperl_dal:job_error_close(Key), - dperl_dal:read_channel(Channel, Key). - -fetch_dst(_Key, _State) -> ?NOT_FOUND. - -delete_dst(_Key, State) -> {false, State}. 
- -insert_dst([JMod, Chn, SId, Job] = Key, Val, #state{mod = Mod, func = Fun, imem_sess = Sess, - channel = Channel} = State) -> - Count = dperl_dal:count_sibling_jobs(list_to_existing_atom(JMod), Chn), - case Val of - #{auditTime := '$do_not_log'} -> {false, State}; - #{auditTime := Time} -> - FormattedTime = case Time of - undefined -> - undefined; - Time when is_list(Time) -> - list_to_tuple(Time) - end, - Status = #{channel => Chn, shortid => SId, job => Job, - at => FormattedTime}, - NewStatus = case Val of - #{error := Error} -> Status#{error => Error}; - _ -> Status - end, - case catch Sess:run_cmd(dal_exec, [Mod, Fun, [Count, [NewStatus]]]) of - {'EXIT', Err} -> - self() ! connect_src_link, - dperl_dal:job_error(Key, <<"sync">>, <<"process_staupdate_dsttus">>, Err), - ?JError("Status update error ~p", [Err]), - {true, State}; - ok -> - case maps:is_key(error, Val) of - false -> dperl_dal:remove_from_channel(Channel, Key); - true -> no_op - end, - {false, State}; - Other -> - dperl_dal:job_error(Key, <<"sync">>, <<"process_status">>, Other), - ?JWarn("Status update bad return ~p", [Other]), - {true, State} - end - end. - -update_dst(_Key, _Val, State) -> {false, State}. - -report_status(_Key, _, _State) -> no_op. - -is_equal(_Key, ?NOT_FOUND, _, _State) -> true; -is_equal(_Key, _Src, _, _State) -> false. - -get_status(#state{audit_start_time = LastAuditTime}) -> - #{lastAuditTime => LastAuditTime}. - -init_state([]) -> #state{}; -init_state([#dperlNodeJobDyn{state = #{lastAuditTime := LastAuditTime}} | _]) -> - #state{audit_start_time = LastAuditTime}; -init_state([_ | Others]) -> - init_state(Others). 
- -init({#dperlJob{name=Name, args = _Args, srcArgs = #{channel := Channel}, - dstArgs = #{credential := Credential, links := Links, - mod := Mod, func := Fun}}, State}) -> - dperl_dal:create_check_channel(Channel, [audit, {type,map}]), - ?JInfo("Starting at audit ~s", [dperl_dal:ts_str(State#state.audit_start_time)]), - {ok, State#state{name=Name, cred = Credential, links = Links, - channel = Channel, mod = Mod, func = Fun}}; -init({Args, _}) -> - ?JError("bad start parameters ~p", [Args]), - {stop, badarg}. - -handle_call(Request, _From, State) -> - ?JWarn("handle_call ~p", [Request]), - {reply, ok, State}. - -handle_cast(Request, State) -> - ?JWarn("handle_cast ~p", [Request]), - {noreply, State}. - -handle_info(Request, State) -> - ?JWarn("handle_info ~p", [Request]), - {noreply, State}. - -terminate(Reason, #state{imem_sess = undefined}) -> - ?JInfo("terminate ~p", [Reason]); -terminate(Reason, #state{imem_sess = Session}) -> - ?JInfo("terminate ~p", [Reason]), - try - Session:close() - catch - _:Error -> - dperl_dal:job_error(<<"terminate">>, <<"terminate">>, Error), - ?JError("terminate ~p:~p", [Reason, Error]) - end. - -code_change(OldVsn, State, Extra) -> - ?JInfo("code_change ~p: ~p", [OldVsn, Extra]), - {ok, State}. - -format_status(Opt, [PDict, State]) -> - ?JInfo("format_status ~p: ~p", [Opt, PDict]), - State. diff --git a/src/ouraring_crawl.erl b/src/ouraring_crawl.erl new file mode 100644 index 00000000..5eb42dd5 --- /dev/null +++ b/src/ouraring_crawl.erl @@ -0,0 +1,104 @@ +-module(ouraring_crawl). + +-export([run/0, run/1]). + +-define(OAUTH2_URL_PREFIX, "https://cloud.ouraring.com"). +-define(API_URL_PREFIX, "https://api.ouraring.com"). 
+run() -> + {ok, _} = application:ensure_all_started(inets), + {ok, _} = application:ensure_all_started(ssl), + run(#{ + client_id => "REMERNOADFFDIN3O", + client_secret => "HYJEW2WTIVIEXQNOTPDN7Y346GYSLNL3", + cb_uri => "https://127.0.0.1:8443/callback", + user_email => "max.ochsenbein@k2informatics.ch", + user_password => "cFMMax--XG$k2sa", + state => "any+value+as+state" + }). + +run(#{ + client_id := ClientId, + client_secret := ClientSecret, + cb_uri := CallbackUri, + user_email := Email, + user_password := Password, + state := State +}) -> + inets:stop(httpc, ?MODULE), + {ok, _} = inets:start(httpc, [{profile, ?MODULE}]), + ok = httpc:set_options([{cookies, enabled}], ?MODULE), + Url = ?OAUTH2_URL_PREFIX ++ "/oauth/authorize" + ++ "?response_type=code" + ++ "&client_id=" ++ ClientId + ++ "&redirect_uri=" ++ edoc_lib:escape_uri(CallbackUri) + ++ "&scope=email+personal+daily" + ++ "&state=" ++ State, + %io:format(">>>>>>>>>> authorize: ~s~n", [Url]), + {ok, {{"HTTP/1.1",302,"Found"}, RespHeader302, []}} = httpc:request(get, {Url, []}, [{autoredirect, false}], [], ?MODULE), + RedirectUri = ?OAUTH2_URL_PREFIX ++ proplists:get_value("location", RespHeader302), + % io:format(">>>>>>>>>> 302 Redirect: ~s~n", [RedirectUri]), + {ok, {{"HTTP/1.1",200,"OK"}, RespHeader, _Body}} = httpc:request(get, {RedirectUri, []}, [{autoredirect, false}], [], ?MODULE), + SetCookieHeader = proplists:get_value("set-cookie", RespHeader), + {match, [XRefCookie]} = re:run(SetCookieHeader, ".*_xsrf=(.*);.*", [{capture, [1], list}]), + {ok,{{"HTTP/1.1",302,"Found"}, RespHeader302_1, []}} = httpc:request( + post, { + RedirectUri, [], "application/x-www-form-urlencoded", + "_xsrf="++edoc_lib:escape_uri(XRefCookie) + ++ "&email=" ++ edoc_lib:escape_uri(Email) + ++ "&password=" ++ edoc_lib:escape_uri(Password) + }, [{autoredirect, false}], [], ?MODULE + ), + RedirectUri_1 = ?OAUTH2_URL_PREFIX ++ proplists:get_value("location", RespHeader302_1), + % io:format(">>>>>>>>>> 302 Redirect: ~s~n", 
[RedirectUri_1]), + {ok, {{"HTTP/1.1",200,"OK"}, _, _}} = httpc:request(get, {RedirectUri_1, []}, [{autoredirect, false}], [], ?MODULE), + {ok, {{"HTTP/1.1",302,"Found"}, RespHeader302_2, []}} = httpc:request( + post, { + RedirectUri_1, [], "application/x-www-form-urlencoded", + "_xsrf="++edoc_lib:escape_uri(XRefCookie) + ++ "&scope_email=on" + ++ "&scope_personal=on" + ++ "&scope_daily=on" + ++ "&allow=Accept" + }, [{autoredirect, false}], [], ?MODULE + ), + RedirectUri_2 = proplists:get_value("location", RespHeader302_2), + % io:format(">>>>>>>>>> 302 RedirectUri: ~s~n", [RedirectUri_2]), + #{query := QueryString} = uri_string:parse(RedirectUri_2), + #{"code" := Code} = maps:from_list(uri_string:dissect_query(QueryString)), + % io:format(">>>>>>>>>> Code: ~p~n", [Code]), + {ok, {{"HTTP/1.1",200,"OK"}, _, BodyJson}} = httpc:request( + post, { + ?OAUTH2_URL_PREFIX ++ "/oauth/token", [], "application/x-www-form-urlencoded", + "grant_type=authorization_code" + ++ "&code=" ++ Code + ++ "&redirect_uri=" ++ edoc_lib:escape_uri(CallbackUri) + ++ "&client_id=" ++ ClientId + ++ "&client_secret=" ++ ClientSecret + }, [{autoredirect, false}], [], ?MODULE + ), + #{<<"access_token">> := AccessToken} = Auth = jsx:decode(list_to_binary(BodyJson), [return_maps]), + io:format("Auth ~p~n", [Auth]), + io:format("-----~nUserInfo :~n~p~n-----~n", [userinfo(AccessToken)]), + io:format("-----~nSleep :~n~p~n-----~n", [sleep(AccessToken)]).%, + %io:format("-----~nActivity :~n~p~n-- Activity --~n", [activity(AccessToken)]). + +userinfo(AccessToken) -> + {ok,{{"HTTP/1.1",200,"OK"}, _, UserInfoJson}} = httpc:request( + get, {?API_URL_PREFIX ++ "/v1/userinfo", [{"Authorization", "Bearer " ++ binary_to_list(AccessToken)}]}, + [{autoredirect, false}], [], ?MODULE + ), + jsx:decode(list_to_binary(UserInfoJson), [return_maps]). 
 +
+sleep(AccessToken) ->
+    {ok,{{"HTTP/1.1",200,"OK"}, _, SleepInfoJson}} = httpc:request(
+        get, {?API_URL_PREFIX ++ "/v1/sleep", [{"Authorization", "Bearer " ++ binary_to_list(AccessToken)}]},
+        [{autoredirect, false}], [], ?MODULE
+    ),
+    jsx:decode(list_to_binary(SleepInfoJson), [return_maps]).
+
+activity(AccessToken) ->
+    {ok,{{"HTTP/1.1",200,"OK"}, _, ActivityInfoJson}} = httpc:request(
+        get, {?API_URL_PREFIX ++ "/v1/activity", [{"Authorization", "Bearer " ++ binary_to_list(AccessToken)}]},
+        [{autoredirect, false}], [], ?MODULE
+    ),
+    jsx:decode(list_to_binary(ActivityInfoJson), [return_maps]).
diff --git a/test.escript b/test.escript
new file mode 100644
index 00000000..79f8e9ec
--- /dev/null
+++ b/test.escript
@@ -0,0 +1,28 @@
+#!/usr/bin/env escript
+%% -*- erlang -*-
+%%! -smp enable -pa _build/default/lib/oranif/ebin/
+
+-define(TNS, <<
+    "(DESCRIPTION=(ADDRESS_LIST=(ADDRESS="
+    "(PROTOCOL=TCP)(HOST=192.168.1.160)(PORT=1521)))"
+    "(CONNECT_DATA=(SERVER=dedicated)(SERVICE_NAME=orclpdb1)))"
+    >>).
+-define(USER, <<"scott">>).
+-define(PSWD, <<"regit">>).
+
+-define(TEST_SQL, <<"select 1 from dual">>).
+
+
+main(_) ->
+    ok = dpi:load_unsafe(),
+    Ctx = dpi:context_create(3, 0),
+    Conn = dpi:conn_create(Ctx, ?USER, ?PSWD, ?TNS, #{}, #{}),
+    Stmt = dpi:conn_prepareStmt(Conn, false, ?TEST_SQL, <<>>),
+    1 = dpi:stmt_execute(Stmt, []),
+    #{found := true} = dpi:stmt_fetch(Stmt),
+    #{data := Result} =
+        dpi:stmt_getQueryValue(Stmt, 1),
+    1.0 = dpi:data_get(Result),
+    io:format("done ~n", []),
+    halt(1).
+ + From 35d76fb471b623b5f105fe29943860df8d1da24c Mon Sep 17 00:00:00 2001 From: shamis Date: Wed, 15 Apr 2020 16:47:50 +0200 Subject: [PATCH 03/72] dperl/jobs and dper/services folder --- src/{ => dperl}/dperl.hrl | 0 src/{ => dperl}/dperl_cp.erl | 12 ++++++------ src/{ => dperl}/dperl_dal.erl | 5 ++--- src/{ => dperl}/dperl_metrics.erl | 2 +- src/{ => dperl}/dperl_strategy_scr.erl | 12 ++++++------ src/{ => dperl}/dperl_sup.erl | 0 src/{ => dperl}/dperl_worker.erl | 0 src/{ => dperl}/dperl_worker_sup.erl | 0 src/{ => dperl/jobs}/dperl_file_copy.erl | 10 +++++----- src/{ => dperl/jobs}/dperl_skvh_copy.erl | 6 +++--- src/{ => dperl/jobs}/dperl_status.hrl | 2 +- src/{ => dperl/jobs}/dperl_status_agr.erl | 0 src/{ => dperl/jobs}/dperl_status_pre.erl | 4 ++-- src/{ => dperl/jobs}/dperl_status_pull.erl | 0 src/{ => dperl/services}/dperl_service_oracle.erl | 8 ++++---- src/{ => dperl/services}/dperl_service_oracle.hrl | 2 +- 16 files changed, 31 insertions(+), 32 deletions(-) rename src/{ => dperl}/dperl.hrl (100%) rename src/{ => dperl}/dperl_cp.erl (98%) rename src/{ => dperl}/dperl_dal.erl (99%) rename src/{ => dperl}/dperl_metrics.erl (99%) rename src/{ => dperl}/dperl_strategy_scr.erl (99%) rename src/{ => dperl}/dperl_sup.erl (100%) rename src/{ => dperl}/dperl_worker.erl (100%) rename src/{ => dperl}/dperl_worker_sup.erl (100%) rename src/{ => dperl/jobs}/dperl_file_copy.erl (99%) rename src/{ => dperl/jobs}/dperl_skvh_copy.erl (99%) rename src/{ => dperl/jobs}/dperl_status.hrl (98%) rename src/{ => dperl/jobs}/dperl_status_agr.erl (100%) rename src/{ => dperl/jobs}/dperl_status_pre.erl (98%) rename src/{ => dperl/jobs}/dperl_status_pull.erl (100%) rename src/{ => dperl/services}/dperl_service_oracle.erl (99%) rename src/{ => dperl/services}/dperl_service_oracle.hrl (99%) diff --git a/src/dperl.hrl b/src/dperl/dperl.hrl similarity index 100% rename from src/dperl.hrl rename to src/dperl/dperl.hrl diff --git a/src/dperl_cp.erl b/src/dperl/dperl_cp.erl similarity 
index 98% rename from src/dperl_cp.erl rename to src/dperl/dperl_cp.erl index 9f8b4436..ad41ee49 100644 --- a/src/dperl_cp.erl +++ b/src/dperl/dperl_cp.erl @@ -145,8 +145,8 @@ check_workers([JobOrService | JobsOrServices], DNodes) -> end end catch - _:Error -> - ?Error("invalid new config ~p. job/service skipped", [Error], ?ST), + _:Error:Stacktrace -> + ?Error("invalid new config ~p. job/service skipped", [Error], Stacktrace), dperl_dal:disable(JobOrService), ok end, @@ -258,9 +258,9 @@ start(JobOrService, Mod, Name) -> ?Debug("start ~p_~s", [Mod, Name]), try dperl_worker:start(imem_config:reference_resolve(JobOrService)) catch - _:Error -> + _:Error:Stacktrace -> ?Error([{enum, dperl_dal:to_atom(Name)}], - "~p disabled, on start ~p", [Mod, Error], ?ST), + "~p disabled, on start ~p", [Mod, Error], Stacktrace), dperl_dal:disable(JobOrService) end. @@ -272,9 +272,9 @@ stop(JobOrService, Mod, Name) -> ?Debug("stop ~p_~s", [Mod, Name]), try dperl_worker:stop(Mod, Name) catch - _:Error -> + _:Error:Stacktrace -> ?Error([{enum, dperl_dal:to_atom(Name)}], - "~p disabled, on stop ~p at ~p", [Mod, Error, ?ST]), + "~p disabled, on stop ~p at ~p", [Mod, Error, Stacktrace]), dperl_dal:disable(JobOrService) end; false -> no_op diff --git a/src/dperl_dal.erl b/src/dperl/dperl_dal.erl similarity index 99% rename from src/dperl_dal.erl rename to src/dperl/dperl_dal.erl index b87ff3ea..ba242cd6 100644 --- a/src/dperl_dal.erl +++ b/src/dperl/dperl_dal.erl @@ -408,9 +408,8 @@ report_status(Module, StatusTable, {Channel, ShortId}, JobName, Status) -> write_channel(StatusTable, [atom_to_list(Module), Channel, ShortId, JobName], Status) catch - C:E -> - ?Error("~p,~p to ~p : ~p", - [Channel, ShortId, StatusTable, {C,E}], ?ST) + C:E:S -> + ?Error("~p,~p to ~p : ~p", [Channel, ShortId, StatusTable, {C,E}], S) end. 
%% pusher report_status diff --git a/src/dperl_metrics.erl b/src/dperl/dperl_metrics.erl similarity index 99% rename from src/dperl_metrics.erl rename to src/dperl/dperl_metrics.erl index 4fae5edc..d2c53aa9 100644 --- a/src/dperl_metrics.erl +++ b/src/dperl/dperl_metrics.erl @@ -1,7 +1,7 @@ -module(dperl_metrics). -include("dperl.hrl"). --include("dperl_status.hrl"). +-include_lib("jobs/dperl_status.hrl"). -behaviour(imem_gen_metrics). diff --git a/src/dperl_strategy_scr.erl b/src/dperl/dperl_strategy_scr.erl similarity index 99% rename from src/dperl_strategy_scr.erl rename to src/dperl/dperl_strategy_scr.erl index 99887509..da45ec4f 100644 --- a/src/dperl_strategy_scr.erl +++ b/src/dperl/dperl_strategy_scr.erl @@ -84,20 +84,20 @@ execute(Mod, Job, State, #{sync := _, cleanup := _, refresh := _} = Args) when is_map(Args) -> try execute(sync, Mod, Job, State, Args) catch - Class:{step_failed, NewArgs} when is_map(NewArgs) -> - ?JError("~p ~p step_failed~n~p", [Mod, Class, erlang:get_stacktrace()]), + Class:{step_failed, NewArgs}:Stacktrace when is_map(NewArgs) -> + ?JError("~p ~p step_failed~n~p", [Mod, Class, Stacktrace]), dperl_dal:update_job_dyn(Job, error), ?RESTART_AFTER(?CYCLE_ERROR_WAIT(Mod, Job), NewArgs), dperl_dal:job_error(get(jstate), atom_to_binary(Class, utf8), Class), State; - Class:{step_failed, NewState} -> - ?JError("~p ~p step_failed~n~p", [Mod, Class, erlang:get_stacktrace()]), + Class:{step_failed, NewState}:Stacktrace -> + ?JError("~p ~p step_failed~n~p", [Mod, Class, Stacktrace]), dperl_dal:update_job_dyn(Job, Mod:get_status(NewState), error), ?RESTART_AFTER(?CYCLE_ERROR_WAIT(Mod, Job), Args), dperl_dal:job_error(get(jstate), <<"step failed">>, Class), NewState; - Class:Error -> - ?JError("~p ~p ~p~n~p", [Mod, Class, Error, erlang:get_stacktrace()]), + Class:Error:Stacktrace -> + ?JError("~p ~p ~p~n~p", [Mod, Class, Error, Stacktrace]), dperl_dal:update_job_dyn(Job, error), ?RESTART_AFTER(?CYCLE_ERROR_WAIT(Mod, Job), Args), 
dperl_dal:job_error(get(jstate), atom_to_binary(Class, utf8), Class), diff --git a/src/dperl_sup.erl b/src/dperl/dperl_sup.erl similarity index 100% rename from src/dperl_sup.erl rename to src/dperl/dperl_sup.erl diff --git a/src/dperl_worker.erl b/src/dperl/dperl_worker.erl similarity index 100% rename from src/dperl_worker.erl rename to src/dperl/dperl_worker.erl diff --git a/src/dperl_worker_sup.erl b/src/dperl/dperl_worker_sup.erl similarity index 100% rename from src/dperl_worker_sup.erl rename to src/dperl/dperl_worker_sup.erl diff --git a/src/dperl_file_copy.erl b/src/dperl/jobs/dperl_file_copy.erl similarity index 99% rename from src/dperl_file_copy.erl rename to src/dperl/jobs/dperl_file_copy.erl index a8e2ba2a..7f1dd027 100644 --- a/src/dperl_file_copy.erl +++ b/src/dperl/jobs/dperl_file_copy.erl @@ -1,6 +1,6 @@ -module(dperl_file_copy). --include("dperl.hrl"). +-include_lib("dperl/dperl.hrl"). -behavior(dperl_worker). @@ -128,14 +128,14 @@ handle_info(execute, #state{name = Job} = State) -> erlang:send_after(?CYCLE_ALWAYS_WAIT(?MODULE, Job), self(), execute), {noreply, State5} catch - error:{sfh_error, Error, State5} -> - ?JError("~p ~p step_failed~n~p", [?MODULE, Error, erlang:get_stacktrace()]), + error:{sfh_error, Error, State5}:Stacktrace -> + ?JError("~p ~p step_failed~n~p", [?MODULE, Error, Stacktrace]), dperl_dal:update_job_dyn(Job, #{}, error), erlang:send_after(?CYCLE_ERROR_WAIT(?MODULE, Job), self(), execute), dperl_dal:job_error(get(jstate), <<"step failed">>, Error), {noreply, State5}; - Class:Error -> - ?JError("~p ~p ~p~n~p", [?MODULE, Class, Error, erlang:get_stacktrace()]), + Class:Error:Stacktrace -> + ?JError("~p ~p ~p~n~p", [?MODULE, Class, Error, Stacktrace]), dperl_dal:update_job_dyn(Job, error), erlang:send_after(?CYCLE_ERROR_WAIT(?MODULE, Job), self(), execute), dperl_dal:job_error(get(jstate), atom_to_binary(Class, utf8), Error), diff --git a/src/dperl_skvh_copy.erl b/src/dperl/jobs/dperl_skvh_copy.erl similarity index 99% rename 
from src/dperl_skvh_copy.erl rename to src/dperl/jobs/dperl_skvh_copy.erl index e8d1d839..a148fad1 100644 --- a/src/dperl_skvh_copy.erl +++ b/src/dperl/jobs/dperl_skvh_copy.erl @@ -1,6 +1,6 @@ -module(dperl_skvh_copy). --include("dperl.hrl"). +-include_lib("dperl/dperl.hrl"). -behavior(dperl_worker). -behavior(dperl_strategy_scr). @@ -159,10 +159,10 @@ terminate(Reason, #state{srcImemSess = SrcSession, dstImemSess = DstSession}) -> [Session:close() || Session <- [SrcSession, DstSession], Session /= undefined], ?JInfo("terminate ~p", [Reason]) catch - _:Error -> + _:Error:Stacktrace -> dperl_dal:job_error(<<"terminate">>, <<"terminate">>, Error), ?JError("terminate ~p:~p ~p", - [Reason, Error, erlang:get_stacktrace()]) + [Reason, Error, Stacktrace]) end. code_change(OldVsn, State, Extra) -> diff --git a/src/dperl_status.hrl b/src/dperl/jobs/dperl_status.hrl similarity index 98% rename from src/dperl_status.hrl rename to src/dperl/jobs/dperl_status.hrl index 0546dced..20e062d3 100644 --- a/src/dperl_status.hrl +++ b/src/dperl/jobs/dperl_status.hrl @@ -1,7 +1,7 @@ -ifndef(_dperl_STATUS_HRL_). -define(_dperl_STATUS_HRL_, true). --include("dperl.hrl"). +-include_lib("dperl/dperl.hrl"). 
-record(context, { name :: list(), diff --git a/src/dperl_status_agr.erl b/src/dperl/jobs/dperl_status_agr.erl similarity index 100% rename from src/dperl_status_agr.erl rename to src/dperl/jobs/dperl_status_agr.erl diff --git a/src/dperl_status_pre.erl b/src/dperl/jobs/dperl_status_pre.erl similarity index 98% rename from src/dperl_status_pre.erl rename to src/dperl/jobs/dperl_status_pre.erl index 939ee47a..408b8da0 100644 --- a/src/dperl_status_pre.erl +++ b/src/dperl/jobs/dperl_status_pre.erl @@ -7,8 +7,8 @@ preprocess(MetricKey, Value, Timestamp, Node, Ctx) -> try preprocess_internal(MetricKey, Value, Timestamp, Node, Ctx) catch - Error:Exception -> - ?Error("Unable to format the Metric ~p : ~p, error ~p:~p ~p", [MetricKey, Value, Error, Exception, erlang:get_stacktrace()]), + Error:Exception:Stacktrace -> + ?Error("Unable to format the Metric ~p : ~p, error ~p:~p ~p", [MetricKey, Value, Error, Exception, Stacktrace]), {[], Ctx} end. diff --git a/src/dperl_status_pull.erl b/src/dperl/jobs/dperl_status_pull.erl similarity index 100% rename from src/dperl_status_pull.erl rename to src/dperl/jobs/dperl_status_pull.erl diff --git a/src/dperl_service_oracle.erl b/src/dperl/services/dperl_service_oracle.erl similarity index 99% rename from src/dperl_service_oracle.erl rename to src/dperl/services/dperl_service_oracle.erl index e04de62a..dd994095 100644 --- a/src/dperl_service_oracle.erl +++ b/src/dperl/services/dperl_service_oracle.erl @@ -362,12 +362,12 @@ init_interface(#{baseUrl := BaseUrl, commonWhitelist := CWhiteList, end, FilteredListenerIps), {ok, State#state{listeners = FilteredListenerIps, port = Port}} catch - error : {badmatch,{error,{already_started,_}}} = Error -> - ?SError("error:~p~n~p", [Error, erlang:get_stacktrace()]), + error:{badmatch,{error,{already_started,_}}} = Error:Stacktrace -> + ?SError("error:~p~n~p", [Error, Stacktrace]), stop_listeners(Error, FilteredListenerIps, Port), init_interface(Intf, State); - Class:Error -> - 
?SError("~p:~p~n~p", [Class, Error, erlang:get_stacktrace()]), + Class:Error:Stacktrace -> + ?SError("~p:~p~n~p", [Class, Error, Stacktrace]), {error, Error} end. diff --git a/src/dperl_service_oracle.hrl b/src/dperl/services/dperl_service_oracle.hrl similarity index 99% rename from src/dperl_service_oracle.hrl rename to src/dperl/services/dperl_service_oracle.hrl index 435bb385..e31ffa6d 100644 --- a/src/dperl_service_oracle.hrl +++ b/src/dperl/services/dperl_service_oracle.hrl @@ -1,7 +1,7 @@ -ifndef(_dperl_ora_). -define(_dperl_ora_, true). --include("dperl.hrl"). +-include_lib("dperl/dperl.hrl"). -define(MAX_COMMENT_LENGTH, 200). -define(MAX_REQUESTOR_LENGTH, 20). From bddb6839474e46a01c4def7767c24d6ba79588de Mon Sep 17 00:00:00 2001 From: shamis Date: Wed, 15 Apr 2020 16:52:48 +0200 Subject: [PATCH 04/72] removed warnings --- src/dperl/services/dperl_service_oracle.erl | 10 +++++----- src/ouraring_crawl.erl | 4 ++-- 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/src/dperl/services/dperl_service_oracle.erl b/src/dperl/services/dperl_service_oracle.erl index dd994095..b694309b 100644 --- a/src/dperl/services/dperl_service_oracle.erl +++ b/src/dperl/services/dperl_service_oracle.erl @@ -399,10 +399,10 @@ local_ips(ListenerAddresses) -> end, [], ListenerAddresses ). -init_resources(#{credential := #{user := User, password := Password}, - links := Links} = Resources, - #state{active_link = ActiveLink} = State) -> - #{opt := Opts, tns := TNS} = lists:nth(ActiveLink, Links), +init_resources(#{credential := #{user := _User, password := _Password}, + links := Links} = _Resources, + #state{active_link = ActiveLink} = _State) -> + #{opt := _Opts, tns := _TNS} = lists:nth(ActiveLink, Links), {error, unimplemented}. %% TODO : reimplement without erlocipool %Options = dperl_dal:oci_opts(?ERLOCIPOOL_LOG_CB, Opts), @@ -481,7 +481,7 @@ init_resources(#{credential := #{user := User, password := Password}, %end. 
-spec create_stmt(atom(), binary(), list()) -> tuple(). -create_stmt(Pool, Sql, Binds) -> +create_stmt(_Pool, _Sql, _Binds) -> {error, unimplemented}. %% TODO % ?OciStmt(Pool, Sql, Binds, Stmt), diff --git a/src/ouraring_crawl.erl b/src/ouraring_crawl.erl index 5eb42dd5..8f237caa 100644 --- a/src/ouraring_crawl.erl +++ b/src/ouraring_crawl.erl @@ -79,8 +79,8 @@ run(#{ #{<<"access_token">> := AccessToken} = Auth = jsx:decode(list_to_binary(BodyJson), [return_maps]), io:format("Auth ~p~n", [Auth]), io:format("-----~nUserInfo :~n~p~n-----~n", [userinfo(AccessToken)]), - io:format("-----~nSleep :~n~p~n-----~n", [sleep(AccessToken)]).%, - %io:format("-----~nActivity :~n~p~n-- Activity --~n", [activity(AccessToken)]). + io:format("-----~nSleep :~n~p~n-----~n", [sleep(AccessToken)]), + io:format("-----~nActivity :~n~p~n-- Activity --~n", [activity(AccessToken)]). userinfo(AccessToken) -> {ok,{{"HTTP/1.1",200,"OK"}, _, UserInfoJson}} = httpc:request( From b1652a46778cc587f9029538a0986d20f0b98c04 Mon Sep 17 00:00:00 2001 From: shamis Date: Sun, 19 Apr 2020 00:23:51 +0200 Subject: [PATCH 05/72] dper ouraring job WIP --- rebar.config | 3 +- src/dperl/jobs/dperl_ouraring_crawl.erl | 271 ++++++++++++++++++++++++ 2 files changed, 273 insertions(+), 1 deletion(-) create mode 100644 src/dperl/jobs/dperl_ouraring_crawl.erl diff --git a/rebar.config b/rebar.config index e47db9cc..3b8e538a 100644 --- a/rebar.config +++ b/rebar.config @@ -42,7 +42,8 @@ {esaml, {git, "https://github.com/K2InformaticsGmbH/esaml", {tag, "2.3.0"}}}, {imem, {git, "https://bitbucket.org/konnexions/imem", {tag, "3.9.0"}}}, {oranif, {git, "https://github.com/c-bik/oranif", {branch, "master"}}}, - {prometheus, "4.5.0"} + {prometheus, "4.5.0"}, + {edate, {git, "https://github.com/dweldon/edate", {branch, "master"}}} ]}. 
{erl_first_files, [ diff --git a/src/dperl/jobs/dperl_ouraring_crawl.erl b/src/dperl/jobs/dperl_ouraring_crawl.erl new file mode 100644 index 00000000..f98fd933 --- /dev/null +++ b/src/dperl/jobs/dperl_ouraring_crawl.erl @@ -0,0 +1,271 @@ +-module(dperl_ouraring_crawl). + +-include_lib("dperl/dperl.hrl"). + +-behavior(dperl_worker). +-behavior(dperl_strategy_scr). + +-define(SHIFT_DAYS(__JOB_NAME), + ?GET_CONFIG(daysToBeShiftedAtStart, [__JOB_NAME], 100, + "Days to be shifted backwards for starting the job") + ). + +% dperl_worker exports +-export([init/1, terminate/2, handle_call/3, handle_cast/2, handle_info/2, + get_status/1, init_state/1]). + +-record(state, {name, channel, client_id, client_secret, password, email, + cb_uri, is_connected = false, access_token, api_url, oauth_url, + last_day, infos = []}). + +% dperl_strategy_scr export +-export([connect_check_src/1, get_source_events/2, connect_check_dst/1, + do_cleanup/2, do_refresh/2, + fetch_src/2, fetch_dst/2, delete_dst/2, insert_dst/3, + update_dst/3, report_status/3]). 
+ +connect_check_src(#state{is_connected = false, client_id = ClientId, cb_uri = CallbackUri, + client_secret = ClientSecret, password = Password, + email = Email, oauth_url = OauthUrl} = State) -> + inets:start(httpc, [{profile, ?MODULE}]), + ok = httpc:set_options([{cookies, enabled}], ?MODULE), + Url = OauthUrl ++ "/oauth/authorize" + ++ "?response_type=code" + ++ "&client_id=" ++ ClientId + ++ "&redirect_uri=" ++ edoc_lib:escape_uri(CallbackUri) + ++ "&scope=email+personal+daily" + ++ "&state=" ++ "test", + %io:format(">>>>>>>>>> authorize: ~s~n", [Url]), + case httpc:request(get, {Url, []}, [{autoredirect, false}], [], ?MODULE) of + {ok, {{"HTTP/1.1",302,"Found"}, RespHeader302, []}} -> + RedirectUri = OauthUrl ++ proplists:get_value("location", RespHeader302), + % io:format(">>>>>>>>>> 302 Redirect: ~s~n", [RedirectUri]), + {ok, {{"HTTP/1.1",200,"OK"}, RespHeader, _Body}} = httpc:request(get, {RedirectUri, []}, [{autoredirect, false}], [], ?MODULE), + SetCookieHeader = proplists:get_value("set-cookie", RespHeader), + {match, [XRefCookie]} = re:run(SetCookieHeader, ".*_xsrf=(.*);.*", [{capture, [1], list}]), + {ok,{{"HTTP/1.1",302,"Found"}, RespHeader302_1, []}} = httpc:request( + post, { + RedirectUri, [], "application/x-www-form-urlencoded", + "_xsrf="++edoc_lib:escape_uri(XRefCookie) + ++ "&email=" ++ edoc_lib:escape_uri(Email) + ++ "&password=" ++ edoc_lib:escape_uri(Password) + }, [{autoredirect, false}], [], ?MODULE + ), + RedirectUri_1 = OauthUrl ++ proplists:get_value("location", RespHeader302_1), + % io:format(">>>>>>>>>> 302 Redirect: ~s~n", [RedirectUri_1]), + {ok, {{"HTTP/1.1",200,"OK"}, _, _}} = httpc:request(get, {RedirectUri_1, []}, [{autoredirect, false}], [], ?MODULE), + {ok, {{"HTTP/1.1",302,"Found"}, RespHeader302_2, []}} = httpc:request( + post, { + RedirectUri_1, [], "application/x-www-form-urlencoded", + "_xsrf="++edoc_lib:escape_uri(XRefCookie) + ++ "&scope_email=on" + ++ "&scope_personal=on" + ++ "&scope_daily=on" + ++ "&allow=Accept" + 
}, [{autoredirect, false}], [], ?MODULE + ), + RedirectUri_2 = proplists:get_value("location", RespHeader302_2), + % io:format(">>>>>>>>>> 302 RedirectUri: ~s~n", [RedirectUri_2]), + #{query := QueryString} = uri_string:parse(RedirectUri_2), + #{"code" := Code} = maps:from_list(uri_string:dissect_query(QueryString)), + % io:format(">>>>>>>>>> Code: ~p~n", [Code]), + {ok, {{"HTTP/1.1",200,"OK"}, _, BodyJson}} = httpc:request( + post, { + OauthUrl ++ "/oauth/token", [], "application/x-www-form-urlencoded", + "grant_type=authorization_code" + ++ "&code=" ++ Code + ++ "&redirect_uri=" ++ edoc_lib:escape_uri(CallbackUri) + ++ "&client_id=" ++ ClientId + ++ "&client_secret=" ++ ClientSecret + }, [{autoredirect, false}], [], ?MODULE + ), + #{<<"access_token">> := AccessToken} = Auth = jsx:decode(list_to_binary(BodyJson), [return_maps]), + ?JInfo("Auth is : ~p", [Auth]), + {ok, State#state{is_connected = true, access_token = AccessToken}}; + {ok, {{_, 200, _}, _, Body}} -> + ?JInfo("code : ~p body : ~p", [200, Body]), + ?JInfo("!!!! cookies :~p", [httpc:which_cookies(?MODULE)]), + {error, Body, State} + end; +connect_check_src(State) -> {ok, State}. + +get_source_events(#state{infos = []} = State, _BulkSize) -> + {ok, sync_complete, State}; +get_source_events(#state{infos = Infos} = State, _BulkSize) -> + {ok, Infos, State#state{infos = []}}. + +connect_check_dst(State) -> {ok, State}. + +do_refresh(_State, _BulkSize) -> {error, cleanup_only}. + +fetch_src({_Key, Value}, _State) -> Value. + +fetch_dst({Key, _}, State) -> + dperl_dal:read_channel(State#state.channel, Key). + +insert_dst(Key, Val, State) -> + update_dst(Key, Val, State). + +report_status(_Key, _Status, _State) -> no_op. 
+ +do_cleanup(State, _BlkCount) -> + {ok, State1} = connect_check_src(State), + {DayQuery, State2} = get_day(State1), + State3 = lists:foldl( + fun(Fun, Acc) -> + try Fun(DayQuery, Acc) + catch E:C:S -> + ?JError("E : ~p, C : ~p, S : ~p", [E, C, S]), + {ok, Acc1} = connect_check_src(Acc#state{is_connected = false}), + Fun(Acc1) + end + end, State2, [fun fetch_userinfo/2, fun fetch_activity/2, + fun fetch_sleep/2, fun fetch_readiness/2]), + case State3#state.infos of + [_] -> + {ok, finish, State3#state{infos = []}}; + Infos -> + ?Info("Infos : ~p", [Infos]), + {ok, State3} + end. + +delete_dst(Key, #state{channel = Channel} = State) -> + ?JInfo("Deleting : ~p", [Key]), + dperl_dal:remove_from_channel(Channel, Key), + {false, State}. + +update_dst({Key, _}, Val, State) -> + update_dst(Key, Val, State); +update_dst(Key, Val, #state{channel = Channel} = State) when is_binary(Val) -> + dperl_dal:write_channel(Channel, Key, Val), + {false, State}; +update_dst(Key, Val, State) -> + update_dst(Key, imem_json:encode(Val), State). + +get_status(#state{last_day = LastDay}) -> + #{lastDay => LastDay}. + +init_state([]) -> #state{}; +init_state([#dperlNodeJobDyn{state = #{lastDay := LastDay}} | _]) -> + #state{last_day = LastDay}; +init_state([_ | Others]) -> + init_state(Others). + +init({#dperlJob{name=Name, dstArgs = #{channel := Channel}, + srcArgs = #{client_id := ClientId, user_password := Password, + client_secret := ClientSecret, user_email := Email, + cb_uri := CallbackUri, api_url := ApiUrl, + oauth_url := OauthUrl}}, State}) -> + ?JInfo("Starting from : ~s", [State#state.last_day]), + ChannelBin = dperl_dal:to_binary(Channel), + dperl_dal:create_check_channel(ChannelBin), + {ok, State#state{channel = ChannelBin, client_id = ClientId, + client_secret = ClientSecret, password = Password, + email = Email, cb_uri = CallbackUri, name = Name, + api_url = ApiUrl, oauth_url = OauthUrl}}; +init(Args) -> + ?JError("bad start parameters ~p", [Args]), + {stop, badarg}. 
+ +handle_call(Request, _From, State) -> + ?JWarn("Unsupported handle_call ~p", [Request]), + {reply, ok, State}. + +handle_cast(Request, State) -> + ?JWarn("Unsupported handle_cast ~p", [Request]), + {noreply, State}. + +handle_info(Request, State) -> + ?JWarn("Unsupported handle_info ~p", [Request]), + {noreply, State}. + +terminate(Reason, _State) -> + httpc:reset_cookies(?MODULE), + ?JInfo("terminate ~p", [Reason]). + +fetch_userinfo(_, #state{api_url = ApiUrl, access_token = AccessToken} = State) -> + {ok,{{"HTTP/1.1",200,"OK"}, _, UserInfoJson}} = httpc:request( + get, {ApiUrl ++ "/v1/userinfo", [{"Authorization", "Bearer " ++ binary_to_list(AccessToken)}]}, + [{autoredirect, false}], [], ?MODULE + ), + UserInfo = imem_json:decode(list_to_binary(UserInfoJson), [return_maps]), + Info = {["ouraring", "userinfo"], UserInfo}, + State#state{infos = [Info | State#state.infos]}. + +get_day(State) -> + Key = ["ouraring", "sleep"], + Yesterday = edate:yesterday(), + YDayStr = edate:date_to_string(Yesterday), + Day = + case dperl_dal:read_channel(State#state.channel, Key) of + ?NOT_FOUND -> + case State#state.last_day of + undefined -> + SDays = ?SHIFT_DAYS(State#state.name), + edate:date_to_string(edate:shift(-1 * SDays, days)); + YDayStr -> + YDayStr; + LastDay -> + edate:date_to_string(edate:shift(edate:string_to_date(LastDay), 1, days)) + end; + #{<<"_day">> := DayBin} -> + DayStr = binary_to_list(DayBin), + case {edate:string_to_date(DayStr), Yesterday} of + {D, D} -> DayStr; + {D1, D2} when D1 < D2 -> edate:date_to_string(edate:shift(D1, 1, day)); + {_, Yesterday} -> edate:date_to_string(Yesterday) + end + end, + DayQuery = "?start=" ++ Day ++ "&end=" ++ Day, + {DayQuery, State#state{last_day = Day}}. 
+ +fetch_sleep(DayQuery, #state{api_url = ApiUrl, access_token = AccessToken} = State) -> + {ok,{{"HTTP/1.1",200,"OK"}, _, SleepInfoJson}} = httpc:request( + get, {ApiUrl ++ "/v1/sleep" ++ DayQuery, [{"Authorization", "Bearer " ++ binary_to_list(AccessToken)}]}, + [{autoredirect, false}], [], ?MODULE + ), + case imem_json:decode(list_to_binary(SleepInfoJson), [return_maps]) of + #{<<"sleep">> := []} -> + State; + Sleep -> + Info = {["ouraring", "sleep"], Sleep#{<<"_day">> => list_to_binary(State#state.last_day)}}, + State#state{infos = [Info | State#state.infos]} + end. + +fetch_activity(DayQuery, #state{api_url = ApiUrl, access_token = AccessToken} = State) -> + {ok,{{"HTTP/1.1",200,"OK"}, _, ActivityInfoJson}} = httpc:request( + get, {ApiUrl ++ "/v1/activity" ++ DayQuery, [{"Authorization", "Bearer " ++ binary_to_list(AccessToken)}]}, + [{autoredirect, false}], [], ?MODULE + ), + case imem_json:decode(list_to_binary(ActivityInfoJson), [return_maps]) of + #{<<"activity">> := []} -> + State; + Activity -> + Info = {["ouraring", "activity"], Activity#{<<"_day">> => list_to_binary(State#state.last_day)}}, + State#state{infos = [Info | State#state.infos]} + end. + +fetch_readiness(DayQuery, #state{api_url = ApiUrl, access_token = AccessToken} = State) -> + {ok,{{"HTTP/1.1",200,"OK"}, _, ReadinessJson}} = httpc:request( + get, {ApiUrl ++ "/v1/readiness" ++ DayQuery, [{"Authorization", "Bearer " ++ binary_to_list(AccessToken)}]}, + [{autoredirect, false}], [], ?MODULE + ), + case imem_json:decode(list_to_binary(ReadinessJson), [return_maps]) of + #{<<"readiness">> := []} -> + State; + Readiness -> + Info = {["ouraring", "readiness"], Readiness#{<<"_day">> => list_to_binary(State#state.last_day)}}, + State#state{infos = [Info | State#state.infos]} + end. + +% format_links(Links) -> +% lists:map( +% fun(#{url := Url} = Link) -> +% NewUrl = +% case lists:last(Url) of +% $/ -> Url; +% _ -> Url ++ "/" +% end, +% Link#{url := NewUrl}; +% (Link) -> Link +% end, Links). 
From 8b57ceb37dc601421367415f61493c8a17810a20 Mon Sep 17 00:00:00 2001 From: shamis Date: Mon, 20 Apr 2020 18:22:17 +0200 Subject: [PATCH 06/72] dper oura fetch wIP --- src/dperl/jobs/dperl_ouraring_crawl.erl | 151 ++++++++++++------------ 1 file changed, 78 insertions(+), 73 deletions(-) diff --git a/src/dperl/jobs/dperl_ouraring_crawl.erl b/src/dperl/jobs/dperl_ouraring_crawl.erl index f98fd933..88dec514 100644 --- a/src/dperl/jobs/dperl_ouraring_crawl.erl +++ b/src/dperl/jobs/dperl_ouraring_crawl.erl @@ -16,7 +16,7 @@ -record(state, {name, channel, client_id, client_secret, password, email, cb_uri, is_connected = false, access_token, api_url, oauth_url, - last_day, infos = []}). + last_sleep_day, last_activity_day, last_readiness_day, infos = []}). % dperl_strategy_scr export -export([connect_check_src/1, get_source_events/2, connect_check_dst/1, @@ -110,22 +110,24 @@ report_status(_Key, _Status, _State) -> no_op. do_cleanup(State, _BlkCount) -> {ok, State1} = connect_check_src(State), - {DayQuery, State2} = get_day(State1), - State3 = lists:foldl( - fun(Fun, Acc) -> - try Fun(DayQuery, Acc) - catch E:C:S -> - ?JError("E : ~p, C : ~p, S : ~p", [E, C, S]), - {ok, Acc1} = connect_check_src(Acc#state{is_connected = false}), - Fun(Acc1) + State2 = lists:foldl( + fun(Type, Acc) -> + case get_day(Type, Acc) of + fetched -> Acc; + Day -> + try fetch_metric(Type, Day, Acc) + catch E:C:S -> + ?JError("E : ~p, C : ~p, S : ~p", [E, C, S]), + {ok, Acc1} = connect_check_src(Acc#state{is_connected = false}), + fetch_metric(Type, Day, Acc1) + end end - end, State2, [fun fetch_userinfo/2, fun fetch_activity/2, - fun fetch_sleep/2, fun fetch_readiness/2]), - case State3#state.infos of - [_] -> - {ok, finish, State3#state{infos = []}}; - Infos -> - ?Info("Infos : ~p", [Infos]), + end, State1, ["sleep", "activity", "readiness"]), + State3 = fetch_userinfo(State2), + case State2#state.infos of + [] -> + {ok, finish, State3}; + _ -> {ok, State3} end. 
@@ -142,12 +144,19 @@ update_dst(Key, Val, #state{channel = Channel} = State) when is_binary(Val) -> update_dst(Key, Val, State) -> update_dst(Key, imem_json:encode(Val), State). -get_status(#state{last_day = LastDay}) -> - #{lastDay => LastDay}. +get_status(#state{last_sleep_day = LastSleepDay, + last_activity_day = LastActivityDay, + last_readiness_day = LastReadinessDay}) -> + #{lastSleepDay => LastSleepDay, lastActivityDay => LastActivityDay, + lastReadinessDay => LastReadinessDay}. init_state([]) -> #state{}; -init_state([#dperlNodeJobDyn{state = #{lastDay := LastDay}} | _]) -> - #state{last_day = LastDay}; +init_state([#dperlNodeJobDyn{state = State} | _]) -> + LastSleepDay = maps:get(lastSleepDay, State, undefined), + LastActivityDay = maps:get(lastActivityDay, State, undefined), + LastReadinessDay = maps:get(lastReadinessDay, State, undefined), + #state{last_sleep_day = LastSleepDay, last_activity_day = LastActivityDay, + last_readiness_day = LastReadinessDay}; init_state([_ | Others]) -> init_state(Others). @@ -156,7 +165,7 @@ init({#dperlJob{name=Name, dstArgs = #{channel := Channel}, client_secret := ClientSecret, user_email := Email, cb_uri := CallbackUri, api_url := ApiUrl, oauth_url := OauthUrl}}, State}) -> - ?JInfo("Starting from : ~s", [State#state.last_day]), + ?JInfo("Starting ..."), ChannelBin = dperl_dal:to_binary(Channel), dperl_dal:create_check_channel(ChannelBin), {ok, State#state{channel = ChannelBin, client_id = ClientId, @@ -183,7 +192,7 @@ terminate(Reason, _State) -> httpc:reset_cookies(?MODULE), ?JInfo("terminate ~p", [Reason]). 
-fetch_userinfo(_, #state{api_url = ApiUrl, access_token = AccessToken} = State) -> +fetch_userinfo(#state{api_url = ApiUrl, access_token = AccessToken} = State) -> {ok,{{"HTTP/1.1",200,"OK"}, _, UserInfoJson}} = httpc:request( get, {ApiUrl ++ "/v1/userinfo", [{"Authorization", "Bearer " ++ binary_to_list(AccessToken)}]}, [{autoredirect, false}], [], ?MODULE @@ -192,71 +201,67 @@ fetch_userinfo(_, #state{api_url = ApiUrl, access_token = AccessToken} = State) Info = {["ouraring", "userinfo"], UserInfo}, State#state{infos = [Info | State#state.infos]}. -get_day(State) -> - Key = ["ouraring", "sleep"], +get_day(Type, State) -> + LastDay = get_last_day(Type, State), + Key = ["ouraring", Type], Yesterday = edate:yesterday(), - YDayStr = edate:date_to_string(Yesterday), - Day = case dperl_dal:read_channel(State#state.channel, Key) of ?NOT_FOUND -> - case State#state.last_day of + case LastDay of undefined -> - SDays = ?SHIFT_DAYS(State#state.name), - edate:date_to_string(edate:shift(-1 * SDays, days)); - YDayStr -> - YDayStr; + SDays = ?SHIFT_DAYS(State#state.name), + edate:shift(-1 * SDays, days); + Yesterday -> + Yesterday; LastDay -> - edate:date_to_string(edate:shift(edate:string_to_date(LastDay), 1, days)) + edate:shift(LastDay, 1, days) end; #{<<"_day">> := DayBin} -> DayStr = binary_to_list(DayBin), case {edate:string_to_date(DayStr), Yesterday} of - {D, D} -> DayStr; - {D1, D2} when D1 < D2 -> edate:date_to_string(edate:shift(D1, 1, day)); - {_, Yesterday} -> edate:date_to_string(Yesterday) + {D, D} -> fetched; + {D1, D2} when D1 < D2 -> edate:shift(D1, 1, day); + {_, Yesterday} -> Yesterday end - end, - DayQuery = "?start=" ++ Day ++ "&end=" ++ Day, - {DayQuery, State#state{last_day = Day}}. 
- -fetch_sleep(DayQuery, #state{api_url = ApiUrl, access_token = AccessToken} = State) -> - {ok,{{"HTTP/1.1",200,"OK"}, _, SleepInfoJson}} = httpc:request( - get, {ApiUrl ++ "/v1/sleep" ++ DayQuery, [{"Authorization", "Bearer " ++ binary_to_list(AccessToken)}]}, - [{autoredirect, false}], [], ?MODULE - ), - case imem_json:decode(list_to_binary(SleepInfoJson), [return_maps]) of - #{<<"sleep">> := []} -> - State; - Sleep -> - Info = {["ouraring", "sleep"], Sleep#{<<"_day">> => list_to_binary(State#state.last_day)}}, - State#state{infos = [Info | State#state.infos]} end. -fetch_activity(DayQuery, #state{api_url = ApiUrl, access_token = AccessToken} = State) -> - {ok,{{"HTTP/1.1",200,"OK"}, _, ActivityInfoJson}} = httpc:request( - get, {ApiUrl ++ "/v1/activity" ++ DayQuery, [{"Authorization", "Bearer " ++ binary_to_list(AccessToken)}]}, - [{autoredirect, false}], [], ?MODULE - ), - case imem_json:decode(list_to_binary(ActivityInfoJson), [return_maps]) of - #{<<"activity">> := []} -> - State; - Activity -> - Info = {["ouraring", "activity"], Activity#{<<"_day">> => list_to_binary(State#state.last_day)}}, - State#state{infos = [Info | State#state.infos]} +fetch_metric(Type, Day, #state{api_url = ApiUrl, access_token = AccessToken} = State) -> + ?JInfo("Fetching metric for ~s on ~p", [Type, Day]), + {ok,{{"HTTP/1.1",200,"OK"}, _, MetricJson}} = httpc:request( + get, {ApiUrl ++ "/v1/" ++ Type ++ day_query(Day), [{"Authorization", "Bearer " ++ binary_to_list(AccessToken)}]}, + [{autoredirect, false}], [], ?MODULE), + TypeBin = list_to_binary(Type), + case imem_json:decode(list_to_binary(MetricJson), [return_maps]) of + #{TypeBin := []} -> + NextDay = next_day(Day), + case NextDay =< edate:yesterday() of + true -> + fetch_metric(Type, NextDay, State); + false -> + State + end; + Metric -> + Info = {["ouraring", Type], Metric#{<<"_day">> => list_to_binary(edate:date_to_string(Day))}}, + set_metric_day(Type, Day, State#state{infos = [Info | State#state.infos]}) end. 
-fetch_readiness(DayQuery, #state{api_url = ApiUrl, access_token = AccessToken} = State) -> - {ok,{{"HTTP/1.1",200,"OK"}, _, ReadinessJson}} = httpc:request( - get, {ApiUrl ++ "/v1/readiness" ++ DayQuery, [{"Authorization", "Bearer " ++ binary_to_list(AccessToken)}]}, - [{autoredirect, false}], [], ?MODULE - ), - case imem_json:decode(list_to_binary(ReadinessJson), [return_maps]) of - #{<<"readiness">> := []} -> - State; - Readiness -> - Info = {["ouraring", "readiness"], Readiness#{<<"_day">> => list_to_binary(State#state.last_day)}}, - State#state{infos = [Info | State#state.infos]} - end. +next_day(Day) when is_list(Day) -> + next_day(edate:string_to_date(Day)); +next_day(Day) when is_tuple(Day) -> + edate:shift(Day, 1, day). + +day_query(Day) when is_tuple(Day) -> + day_query(edate:date_to_string(Day)); +day_query(Day) when is_list(Day) -> + "?start=" ++ Day ++ "&end=" ++ Day. + +get_last_day("sleep", #state{last_sleep_day = LastSleepDay}) -> LastSleepDay; +get_last_day("activity", #state{last_activity_day = LastActivityDay}) -> LastActivityDay; +get_last_day("readiness", #state{last_readiness_day = LastReadinessDay}) -> LastReadinessDay. + +set_metric_day("sleep", Day, State) -> State#state{last_sleep_day = Day}; +set_metric_day("activity", Day, State) -> State#state{last_activity_day = Day}; +set_metric_day("readiness", Day, State) -> State#state{last_readiness_day = Day}. 
% format_links(Links) -> % lists:map( From 64e7f41d7bd36ca4355451654059789106f16d1d Mon Sep 17 00:00:00 2001 From: shamis Date: Tue, 21 Apr 2020 13:05:39 +0200 Subject: [PATCH 07/72] ourar rinj job complete --- src/dperl/jobs/dperl_ouraring_crawl.erl | 148 ++++++++++++++---------- src/ouraring_crawl.erl | 104 ----------------- 2 files changed, 88 insertions(+), 164 deletions(-) delete mode 100644 src/ouraring_crawl.erl diff --git a/src/dperl/jobs/dperl_ouraring_crawl.erl b/src/dperl/jobs/dperl_ouraring_crawl.erl index 88dec514..3c192924 100644 --- a/src/dperl/jobs/dperl_ouraring_crawl.erl +++ b/src/dperl/jobs/dperl_ouraring_crawl.erl @@ -16,7 +16,8 @@ -record(state, {name, channel, client_id, client_secret, password, email, cb_uri, is_connected = false, access_token, api_url, oauth_url, - last_sleep_day, last_activity_day, last_readiness_day, infos = []}). + last_sleep_day, last_activity_day, last_readiness_day, + infos = [], auth_time, auth_expiry}). % dperl_strategy_scr export -export([connect_check_src/1, get_source_events/2, connect_check_dst/1, @@ -24,22 +25,27 @@ fetch_src/2, fetch_dst/2, delete_dst/2, insert_dst/3, update_dst/3, report_status/3]). 
+connect_check_src(#state{is_connected = true, auth_expiry = ExpiresIn, auth_time = AuthTime} = State) -> + case imem_datatype:sec_diff(AuthTime) of + Diff when Diff >= (ExpiresIn - 100) -> + % access token will expire in 100 seconds or less + connect_check_src(State#state{is_connected = false}); + _ -> + {ok, State} + end; connect_check_src(#state{is_connected = false, client_id = ClientId, cb_uri = CallbackUri, client_secret = ClientSecret, password = Password, email = Email, oauth_url = OauthUrl} = State) -> - inets:start(httpc, [{profile, ?MODULE}]), - ok = httpc:set_options([{cookies, enabled}], ?MODULE), + httpc:reset_cookies(?MODULE), Url = OauthUrl ++ "/oauth/authorize" ++ "?response_type=code" ++ "&client_id=" ++ ClientId ++ "&redirect_uri=" ++ edoc_lib:escape_uri(CallbackUri) ++ "&scope=email+personal+daily" ++ "&state=" ++ "test", - %io:format(">>>>>>>>>> authorize: ~s~n", [Url]), case httpc:request(get, {Url, []}, [{autoredirect, false}], [], ?MODULE) of {ok, {{"HTTP/1.1",302,"Found"}, RespHeader302, []}} -> RedirectUri = OauthUrl ++ proplists:get_value("location", RespHeader302), - % io:format(">>>>>>>>>> 302 Redirect: ~s~n", [RedirectUri]), {ok, {{"HTTP/1.1",200,"OK"}, RespHeader, _Body}} = httpc:request(get, {RedirectUri, []}, [{autoredirect, false}], [], ?MODULE), SetCookieHeader = proplists:get_value("set-cookie", RespHeader), {match, [XRefCookie]} = re:run(SetCookieHeader, ".*_xsrf=(.*);.*", [{capture, [1], list}]), @@ -52,7 +58,6 @@ connect_check_src(#state{is_connected = false, client_id = ClientId, cb_uri = Ca }, [{autoredirect, false}], [], ?MODULE ), RedirectUri_1 = OauthUrl ++ proplists:get_value("location", RespHeader302_1), - % io:format(">>>>>>>>>> 302 Redirect: ~s~n", [RedirectUri_1]), {ok, {{"HTTP/1.1",200,"OK"}, _, _}} = httpc:request(get, {RedirectUri_1, []}, [{autoredirect, false}], [], ?MODULE), {ok, {{"HTTP/1.1",302,"Found"}, RespHeader302_2, []}} = httpc:request( post, { @@ -65,10 +70,8 @@ connect_check_src(#state{is_connected = 
false, client_id = ClientId, cb_uri = Ca }, [{autoredirect, false}], [], ?MODULE ), RedirectUri_2 = proplists:get_value("location", RespHeader302_2), - % io:format(">>>>>>>>>> 302 RedirectUri: ~s~n", [RedirectUri_2]), #{query := QueryString} = uri_string:parse(RedirectUri_2), #{"code" := Code} = maps:from_list(uri_string:dissect_query(QueryString)), - % io:format(">>>>>>>>>> Code: ~p~n", [Code]), {ok, {{"HTTP/1.1",200,"OK"}, _, BodyJson}} = httpc:request( post, { OauthUrl ++ "/oauth/token", [], "application/x-www-form-urlencoded", @@ -79,13 +82,13 @@ connect_check_src(#state{is_connected = false, client_id = ClientId, cb_uri = Ca ++ "&client_secret=" ++ ClientSecret }, [{autoredirect, false}], [], ?MODULE ), - #{<<"access_token">> := AccessToken} = Auth = jsx:decode(list_to_binary(BodyJson), [return_maps]), - ?JInfo("Auth is : ~p", [Auth]), - {ok, State#state{is_connected = true, access_token = AccessToken}}; - {ok, {{_, 200, _}, _, Body}} -> - ?JInfo("code : ~p body : ~p", [200, Body]), - ?JInfo("!!!! cookies :~p", [httpc:which_cookies(?MODULE)]), - {error, Body, State} + #{<<"access_token">> := AccessToken, <<"expires_in">> := ExpiresIn} = Auth = jsx:decode(list_to_binary(BodyJson), [return_maps]), + ?JInfo("Authentication successful : ~p", [Auth]), + {ok, State#state{is_connected = true, access_token = AccessToken, + auth_expiry = ExpiresIn, auth_time = imem_meta:time()}}; + Error -> + ?JError("Unexpected response : ~p", [Error]), + {error, invalid_return, State} end; connect_check_src(State) -> {ok, State}. @@ -109,26 +112,17 @@ insert_dst(Key, Val, State) -> report_status(_Key, _Status, _State) -> no_op. 
do_cleanup(State, _BlkCount) -> - {ok, State1} = connect_check_src(State), - State2 = lists:foldl( - fun(Type, Acc) -> - case get_day(Type, Acc) of - fetched -> Acc; - Day -> - try fetch_metric(Type, Day, Acc) - catch E:C:S -> - ?JError("E : ~p, C : ~p, S : ~p", [E, C, S]), - {ok, Acc1} = connect_check_src(Acc#state{is_connected = false}), - fetch_metric(Type, Day, Acc1) - end - end - end, State1, ["sleep", "activity", "readiness"]), - State3 = fetch_userinfo(State2), - case State2#state.infos of - [] -> - {ok, finish, State3}; - _ -> - {ok, State3} + Types = ["sleep", "activity", "readiness", "userinfo"], + case fetch_metrics(Types, State) of + {ok, State2} -> + case State2#state.infos of + [_] -> + {ok, finish, State2}; + _ -> + {ok, State2} + end; + {error, Error} -> + {error, Error, State#state{is_connected = false}} end. delete_dst(Key, #state{channel = Channel} = State) -> @@ -168,6 +162,8 @@ init({#dperlJob{name=Name, dstArgs = #{channel := Channel}, ?JInfo("Starting ..."), ChannelBin = dperl_dal:to_binary(Channel), dperl_dal:create_check_channel(ChannelBin), + inets:start(httpc, [{profile, ?MODULE}]), + ok = httpc:set_options([{cookies, enabled}], ?MODULE), {ok, State#state{channel = ChannelBin, client_id = ClientId, client_secret = ClientSecret, password = Password, email = Email, cb_uri = CallbackUri, name = Name, @@ -192,14 +188,57 @@ terminate(Reason, _State) -> httpc:reset_cookies(?MODULE), ?JInfo("terminate ~p", [Reason]). +fetch_metrics([], State) -> {ok, State}; +fetch_metrics(["userinfo" | Types], State) -> + case fetch_userinfo(State) of + {error, Error} -> + {error, Error}; + State1 -> + fetch_metrics(Types, State1) + end; +fetch_metrics([Type | Types], State) -> + case get_day(Type, State) of + fetched -> + fetch_metrics(Types, State); + Day -> + case fetch_metric(Type, Day, State) of + {error, Error} -> + {error, Error}; + State1 -> + fetch_metrics(Types, State1) + end + end. 
+ +fetch_metric(Type, Day, #state{api_url = ApiUrl, access_token = AccessToken} = State) -> + ?JInfo("Fetching metric for ~s on ~p", [Type, Day]), + Url = ApiUrl ++ "/v1/" ++ Type ++ day_query(Day), + TypeBin = list_to_binary(Type), + case exec_req(Url, AccessToken) of + #{TypeBin := []} -> + NextDay = next_day(Day), + case NextDay =< edate:yesterday() of + true -> + fetch_metric(Type, NextDay, State); + false -> + State + end; + Metric when is_map(Metric) -> + Info = {["ouraring", Type], Metric#{<<"_day">> => list_to_binary(edate:date_to_string(Day))}}, + set_metric_day(Type, Day, State#state{infos = [Info | State#state.infos]}); + {error, Error} -> + ?JError("Error fetching ~s for ~p : ~p", [Type, Day, Error]), + {error, Error} + end. + fetch_userinfo(#state{api_url = ApiUrl, access_token = AccessToken} = State) -> - {ok,{{"HTTP/1.1",200,"OK"}, _, UserInfoJson}} = httpc:request( - get, {ApiUrl ++ "/v1/userinfo", [{"Authorization", "Bearer " ++ binary_to_list(AccessToken)}]}, - [{autoredirect, false}], [], ?MODULE - ), - UserInfo = imem_json:decode(list_to_binary(UserInfoJson), [return_maps]), - Info = {["ouraring", "userinfo"], UserInfo}, - State#state{infos = [Info | State#state.infos]}. + case exec_req(ApiUrl ++ "/v1/userinfo", AccessToken) of + UserInfo when is_map(UserInfo) -> + Info = {["ouraring", "userinfo"], UserInfo}, + State#state{infos = [Info | State#state.infos]}; + {error, Error} -> + ?JError("Error fetching userinfo : ~p", [Error]), + {error, Error} + end. get_day(Type, State) -> LastDay = get_last_day(Type, State), @@ -225,24 +264,13 @@ get_day(Type, State) -> end end. 
-fetch_metric(Type, Day, #state{api_url = ApiUrl, access_token = AccessToken} = State) -> - ?JInfo("Fetching metric for ~s on ~p", [Type, Day]), - {ok,{{"HTTP/1.1",200,"OK"}, _, MetricJson}} = httpc:request( - get, {ApiUrl ++ "/v1/" ++ Type ++ day_query(Day), [{"Authorization", "Bearer " ++ binary_to_list(AccessToken)}]}, - [{autoredirect, false}], [], ?MODULE), - TypeBin = list_to_binary(Type), - case imem_json:decode(list_to_binary(MetricJson), [return_maps]) of - #{TypeBin := []} -> - NextDay = next_day(Day), - case NextDay =< edate:yesterday() of - true -> - fetch_metric(Type, NextDay, State); - false -> - State - end; - Metric -> - Info = {["ouraring", Type], Metric#{<<"_day">> => list_to_binary(edate:date_to_string(Day))}}, - set_metric_day(Type, Day, State#state{infos = [Info | State#state.infos]}) +exec_req(Url, AccessToken) -> + AuthHeader = [{"Authorization", "Bearer " ++ binary_to_list(AccessToken)}], + case httpc:request(get, {Url, AuthHeader}, [{autoredirect, false}], [], ?MODULE) of + {ok, {{_, 200, "OK"}, _, Result}} -> + imem_json:decode(list_to_binary(Result), [return_maps]); + Error -> + {error, Error} end. next_day(Day) when is_list(Day) -> diff --git a/src/ouraring_crawl.erl b/src/ouraring_crawl.erl deleted file mode 100644 index 8f237caa..00000000 --- a/src/ouraring_crawl.erl +++ /dev/null @@ -1,104 +0,0 @@ --module(ouraring_crawl). - --export([run/0, run/1]). - --define(OAUTH2_URL_PREFIX, "https://cloud.ouraring.com"). --define(API_URL_PREFIX, "https://api.ouraring.com"). -run() -> - {ok, _} = application:ensure_all_started(inets), - {ok, _} = application:ensure_all_started(ssl), - run(#{ - client_id => "REMERNOADFFDIN3O", - client_secret => "HYJEW2WTIVIEXQNOTPDN7Y346GYSLNL3", - cb_uri => "https://127.0.0.1:8443/callback", - user_email => "max.ochsenbein@k2informatics.ch", - user_password => "cFMMax--XG$k2sa", - state => "any+value+as+state" - }). 
- -run(#{ - client_id := ClientId, - client_secret := ClientSecret, - cb_uri := CallbackUri, - user_email := Email, - user_password := Password, - state := State -}) -> - inets:stop(httpc, ?MODULE), - {ok, _} = inets:start(httpc, [{profile, ?MODULE}]), - ok = httpc:set_options([{cookies, enabled}], ?MODULE), - Url = ?OAUTH2_URL_PREFIX ++ "/oauth/authorize" - ++ "?response_type=code" - ++ "&client_id=" ++ ClientId - ++ "&redirect_uri=" ++ edoc_lib:escape_uri(CallbackUri) - ++ "&scope=email+personal+daily" - ++ "&state=" ++ State, - %io:format(">>>>>>>>>> authorize: ~s~n", [Url]), - {ok, {{"HTTP/1.1",302,"Found"}, RespHeader302, []}} = httpc:request(get, {Url, []}, [{autoredirect, false}], [], ?MODULE), - RedirectUri = ?OAUTH2_URL_PREFIX ++ proplists:get_value("location", RespHeader302), - % io:format(">>>>>>>>>> 302 Redirect: ~s~n", [RedirectUri]), - {ok, {{"HTTP/1.1",200,"OK"}, RespHeader, _Body}} = httpc:request(get, {RedirectUri, []}, [{autoredirect, false}], [], ?MODULE), - SetCookieHeader = proplists:get_value("set-cookie", RespHeader), - {match, [XRefCookie]} = re:run(SetCookieHeader, ".*_xsrf=(.*);.*", [{capture, [1], list}]), - {ok,{{"HTTP/1.1",302,"Found"}, RespHeader302_1, []}} = httpc:request( - post, { - RedirectUri, [], "application/x-www-form-urlencoded", - "_xsrf="++edoc_lib:escape_uri(XRefCookie) - ++ "&email=" ++ edoc_lib:escape_uri(Email) - ++ "&password=" ++ edoc_lib:escape_uri(Password) - }, [{autoredirect, false}], [], ?MODULE - ), - RedirectUri_1 = ?OAUTH2_URL_PREFIX ++ proplists:get_value("location", RespHeader302_1), - % io:format(">>>>>>>>>> 302 Redirect: ~s~n", [RedirectUri_1]), - {ok, {{"HTTP/1.1",200,"OK"}, _, _}} = httpc:request(get, {RedirectUri_1, []}, [{autoredirect, false}], [], ?MODULE), - {ok, {{"HTTP/1.1",302,"Found"}, RespHeader302_2, []}} = httpc:request( - post, { - RedirectUri_1, [], "application/x-www-form-urlencoded", - "_xsrf="++edoc_lib:escape_uri(XRefCookie) - ++ "&scope_email=on" - ++ "&scope_personal=on" - ++ 
"&scope_daily=on" - ++ "&allow=Accept" - }, [{autoredirect, false}], [], ?MODULE - ), - RedirectUri_2 = proplists:get_value("location", RespHeader302_2), - % io:format(">>>>>>>>>> 302 RedirectUri: ~s~n", [RedirectUri_2]), - #{query := QueryString} = uri_string:parse(RedirectUri_2), - #{"code" := Code} = maps:from_list(uri_string:dissect_query(QueryString)), - % io:format(">>>>>>>>>> Code: ~p~n", [Code]), - {ok, {{"HTTP/1.1",200,"OK"}, _, BodyJson}} = httpc:request( - post, { - ?OAUTH2_URL_PREFIX ++ "/oauth/token", [], "application/x-www-form-urlencoded", - "grant_type=authorization_code" - ++ "&code=" ++ Code - ++ "&redirect_uri=" ++ edoc_lib:escape_uri(CallbackUri) - ++ "&client_id=" ++ ClientId - ++ "&client_secret=" ++ ClientSecret - }, [{autoredirect, false}], [], ?MODULE - ), - #{<<"access_token">> := AccessToken} = Auth = jsx:decode(list_to_binary(BodyJson), [return_maps]), - io:format("Auth ~p~n", [Auth]), - io:format("-----~nUserInfo :~n~p~n-----~n", [userinfo(AccessToken)]), - io:format("-----~nSleep :~n~p~n-----~n", [sleep(AccessToken)]), - io:format("-----~nActivity :~n~p~n-- Activity --~n", [activity(AccessToken)]). - -userinfo(AccessToken) -> - {ok,{{"HTTP/1.1",200,"OK"}, _, UserInfoJson}} = httpc:request( - get, {?API_URL_PREFIX ++ "/v1/userinfo", [{"Authorization", "Bearer " ++ binary_to_list(AccessToken)}]}, - [{autoredirect, false}], [], ?MODULE - ), - jsx:decode(list_to_binary(UserInfoJson), [return_maps]). - -sleep(AccessToken) -> - {ok,{{"HTTP/1.1",200,"OK"}, _, SleepInfoJson}} = httpc:request( - get, {?API_URL_PREFIX ++ "/v1/sleep", [{"Authorization", "Bearer " ++ binary_to_list(AccessToken)}]}, - [{autoredirect, false}], [], ?MODULE - ), - jsx:decode(list_to_binary(SleepInfoJson), [return_maps]). 
- -activity(AccessToken) -> - {ok,{{"HTTP/1.1",200,"OK"}, _, ActivityInfoJson}} = httpc:request( - get, {?API_URL_PREFIX ++ "/v1/activity", [{"Authorization", "Bearer " ++ binary_to_list(AccessToken)}]}, - [{autoredirect, false}], [], ?MODULE - ), - jsx:decode(list_to_binary(ActivityInfoJson), [return_maps]). From de76bfb9ab55ff8c33608b71c6dd0f3f6c46d621 Mon Sep 17 00:00:00 2001 From: shamis Date: Tue, 28 Apr 2020 11:33:39 +0200 Subject: [PATCH 08/72] removed services --- src/dperl/services/dperl_service_oracle.erl | 703 -------------------- src/dperl/services/dperl_service_oracle.hrl | 215 ------ 2 files changed, 918 deletions(-) delete mode 100644 src/dperl/services/dperl_service_oracle.erl delete mode 100644 src/dperl/services/dperl_service_oracle.hrl diff --git a/src/dperl/services/dperl_service_oracle.erl b/src/dperl/services/dperl_service_oracle.erl deleted file mode 100644 index b694309b..00000000 --- a/src/dperl/services/dperl_service_oracle.erl +++ /dev/null @@ -1,703 +0,0 @@ --module(dperl_service_oracle). - --include("dperl_service_oracle.hrl"). - --behavior(dperl_worker). --behavior(cowboy_middleware). --behavior(cowboy_loop). - -% dperl_worker exports --export([init/1, terminate/2, handle_call/3, handle_cast/2, handle_info/2, - code_change/3, format_status/2, get_status/1, init_state/1]). - --record(state, {name, resource, baseUrl, active_link = 1, stmt_profile_del, - stmt_ts_get, stmt_ts_post, stmt_ts_put, stmt_ts_reset, - stmt_ts_revert, stmt_bar_get, stmt_bar_put, stmt_bar_post, - pool, c_whitelist = #{}, listeners = [], tenants = #{}, port, - close_statement = [], close_session = [], dynstate = #{}}). - -% cowboy rest exports --export([init/2, info/3, execute/2]). - --define(API_VERSION, "1.0.0"). - -init_state(_) -> #state{}. - -get_status(#state{dynstate = DynState}) -> DynState. 
- -init({#dperlService{name = SName, args = Args, resource = Resource, - interface = Interface}, - State}) -> - case init_resources( - Resource, - State#state{name = SName, resource = Resource, - dynstate = #{start => imem_meta:time(), req => 0, - error => #{}}}) of - {ok, State1} -> - Interface1 = maps:merge(Interface, Args), - case init_interface(Interface1, State1) of - {ok, State2} -> - erlang:send_after(?SERVICE_UPDATE_PERIOD(SName), self(), update_dyn), - erlang:send_after(?SERVICE_STATUS_RESET_PERIOD(SName), self(), reset_dyn), - dperl_dal:update_service_dyn( - State2#state.name, State2#state.dynstate, - ?SERVICE_ACTIVE_THRESHOLD(SName), ?SERVICE_OVERLOAD_THRESHOLD(SName)), - {ok, State2}; - Error -> - erlocipool:del(State1#state.pool), - {stop, Error} - end; - Error -> {stop, Error} - end; -init({Args, _}) -> - ?SError("bad start parameters ~p", [Args]), - {stop, badarg}. - -handle_call(Request, _From, State) -> - ?SWarn("Unsupported handle_call ~p", [Request]), - {reply, ok, State}. - -% common -handle_cast(#{operation := {profile, delete}, reply := RespPid, - msisdn := Msisdn, tenant := Tenant, - data := #{<<"comment">> := Comment, - <<"requestor">> := Requestor}}, - #state{stmt_profile_del = ProfileDelStmt} = State) - when byte_size(Comment) =< ?MAX_COMMENT_LENGTH, - byte_size(Requestor) =< ?MAX_REQUESTOR_LENGTH -> - Resp = db_call({stmt_profile_del, ProfileDelStmt}, - [Tenant, Msisdn, Requestor, Comment]), - RespPid ! {reply, Resp}, - {noreply, State}; -%% topstopper -handle_cast(#{operation := {topstopper, get}, reply := RespPid, - msisdn := Msisdn, tenant := Tenant}, State) -> - Resp = db_call({stmt_ts_get, State#state.stmt_ts_get}, [Tenant, Msisdn]), - RespPid ! 
{reply, Resp}, - {noreply, State}; -handle_cast(#{operation := {topstopper, post}, reply := RespPid, - msisdn := Msisdn, tenant := Tenant, - data := #{<<"comment">> := Comment, - <<"requestor">> := Requestor}}, - #state{stmt_ts_post = TsPostStmt} = State) - when byte_size(Comment) =< ?MAX_COMMENT_LENGTH, - byte_size(Requestor) =< ?MAX_REQUESTOR_LENGTH -> - Resp = db_call({stmt_ts_post, TsPostStmt}, [Tenant, Msisdn, Requestor, - Comment]), - RespPid ! {reply, Resp}, - {noreply, State}; -handle_cast(#{operation := {topstopper, put}, reply := RespPid, - msisdn := Msisdn, tenant := Tenant, - data := #{<<"comment">> := Comment, - <<"requestor">> := Requestor, - <<"type">> := Type, - <<"limit">> := Limit}}, - #state{stmt_ts_put = TsPutStmt} = State) - when byte_size(Comment) =< ?MAX_COMMENT_LENGTH andalso - byte_size(Requestor) =< ?MAX_REQUESTOR_LENGTH andalso - (Type == <<"swisscom">> orelse Type == <<"customer">>) andalso - (Limit >= -1 andalso Limit =< 100000) -> - Resp = db_call({stmt_ts_put, TsPutStmt}, - [Tenant, Msisdn, Type, - dderloci_utils:oranumber_encode( - list_to_binary(io_lib:format("~p", [Limit]))), - Requestor, Comment]), - RespPid ! {reply, Resp}, - {noreply, State}; -handle_cast(#{operation := {topstopper, reset}, reply := RespPid, - msisdn := Msisdn, tenant := Tenant, - data := #{<<"comment">> := Comment, - <<"requestor">> := Requestor, - <<"type">> := Type}}, - #state{stmt_ts_reset = TsResetStmt} = State) - when byte_size(Comment) =< ?MAX_COMMENT_LENGTH andalso - byte_size(Requestor) =< ?MAX_REQUESTOR_LENGTH andalso - (Type == <<"swisscom">> orelse Type == <<"customer">>) -> - Resp = db_call({stmt_ts_reset, TsResetStmt}, [Tenant, Msisdn, Type, - Requestor, Comment]), - RespPid ! 
{reply, Resp}, - {noreply, State}; -handle_cast(#{operation := {topstopper, revert}, reply := RespPid, - msisdn := Msisdn, tenant := Tenant, - data := #{<<"comment">> := Comment, - <<"requestor">> := Requestor, - <<"type">> := Type}}, - #state{stmt_ts_revert = TsRevertStmt} = State) - when byte_size(Comment) =< ?MAX_COMMENT_LENGTH andalso - byte_size(Requestor) =< ?MAX_REQUESTOR_LENGTH andalso - (Type == <<"swisscom">> orelse Type == <<"customer">>) -> - Resp = db_call({stmt_ts_revert, TsRevertStmt}, [Tenant, Msisdn, Type, - Requestor, Comment]), - RespPid ! {reply, Resp}, - {noreply, State}; -%% barring -handle_cast(#{operation := {barring, get}, reply := RespPid, - msisdn := Msisdn, tenant := Tenant}, State) -> - Resp = db_call({stmt_bar_get, State#state.stmt_bar_get}, [Tenant, Msisdn]), - RespPid ! {reply, Resp}, - {noreply, State}; -handle_cast(#{operation := {barring, post}, reply := RespPid, - msisdn := Msisdn, tenant := Tenant, - data := #{<<"comment">> := Comment, - <<"requestor">> := Requestor}}, - #state{stmt_bar_post = BarPostStmt} = State) - when byte_size(Comment) =< ?MAX_COMMENT_LENGTH, - byte_size(Requestor) =< ?MAX_REQUESTOR_LENGTH -> - Resp = db_call({stmt_bar_post, BarPostStmt}, [Tenant, Msisdn, Comment, Requestor]), - RespPid ! {reply, Resp}, - {noreply, State}; -handle_cast(#{operation := {barring, put}, reply := RespPid, - msisdn := Msisdn, tenant := Tenant, - data := #{<<"type">> := Type, <<"barring">> := Barring, - <<"comment">> := Comment, <<"requestor">> := Requestor}}, - #state{stmt_bar_put = BarPutStmt} = State) - when byte_size(Comment) =< ?MAX_COMMENT_LENGTH andalso - byte_size(Requestor) =< ?MAX_REQUESTOR_LENGTH andalso - (Type == <<"swisscom">> orelse Type == <<"customer">>) andalso - (Barring == 0 orelse Barring == 6 orelse Barring == 9) -> - Resp = db_call({stmt_bar_put, BarPutStmt}, [Tenant, Msisdn, Type, Barring, Comment, Requestor]), - RespPid ! 
{reply, Resp}, - {noreply, State}; -%% -handle_cast(#{reply := RespPid}, State) -> - RespPid ! {reply, bad_req}, - {noreply, State}; -handle_cast(Request, State) -> - ?SWarn("Unsupported handle_cast ~p", [Request]), - {noreply, State}. - --define(STMT_REBUILD(__SQL, __BINDS, __POOL, __STMT, __STATE), - (__STATE#state.__STMT):close(), - __STATE#state{__STMT = create_stmt(__POOL, __SQL, __BINDS)}). - -handle_info(update_dyn, #state{dynstate = Ds, name = SName} = State) -> - dperl_dal:update_service_dyn( - State#state.name, Ds, ?SERVICE_ACTIVE_THRESHOLD(SName), - ?SERVICE_OVERLOAD_THRESHOLD(SName)), - erlang:send_after(?SERVICE_UPDATE_PERIOD(SName), self(), update_dyn), - {noreply, State}; -handle_info(reset_dyn, #state{name = SName} = State) -> - NewDynState = (State#state.dynstate)#{req => 0, error => #{}}, - dperl_dal:update_service_dyn( - State#state.name, NewDynState, ?SERVICE_ACTIVE_THRESHOLD(SName), - ?SERVICE_OVERLOAD_THRESHOLD(SName)), - erlang:send_after(?SERVICE_STATUS_RESET_PERIOD(SName), self(), reset_dyn), - {noreply, State#state{dynstate = NewDynState}}; -handle_info({error, StmtType, Code, Message}, - #state{close_session = CloseSessionsErrors, listeners = Listeners, - close_statement = CloseStatementErrors, port = Port, - name = SName, pool = Pool} = State) -> - case lists:member(Code, CloseStatementErrors) of - true -> - ?SError("statement rebuild : ORA-~p ~s", [Code, Message]), - {noreply, - case StmtType of - stmt_profile_del -> - ?STMT_REBUILD(?PROFILE_DELETE_SQL, - ?PROFILE_DELETE_TOPSTOPPER_POST_BINDS, - Pool, stmt_profile_del, State); - stmt_ts_get -> - ?STMT_REBUILD(?TOPSTOPPER_GET_SQL, - ?TOPSTOPPER_BARRING_GET_BINDS, - Pool, stmt_ts_get, State); - stmt_ts_post -> - ?STMT_REBUILD(?TOPSTOPPER_POST_SQL, - ?PROFILE_DELETE_TOPSTOPPER_POST_BINDS, - Pool, stmt_ts_post, State); - stmt_ts_put -> - ?STMT_REBUILD(?TOPSTOPPER_PUT_SQL, - ?TOPSTOPPER_PUT_BINDS, - Pool, stmt_ts_put, State); - stmt_ts_reset -> - ?STMT_REBUILD(?TOPSTOPPER_RESET_SQL, - 
?TOPSTOPPER_RESET_REVERT_BINDS, - Pool, stmt_ts_reset, State); - stmt_ts_revert -> - ?STMT_REBUILD(?TOPSTOPPER_REVERT_SQL, - ?TOPSTOPPER_RESET_REVERT_BINDS, - Pool, stmt_ts_revert, State); - stmt_bar_get -> - ?STMT_REBUILD(?BARRING_GET_SQL, - ?TOPSTOPPER_BARRING_GET_BINDS, - Pool, stmt_bar_get, State); - stmt_bar_post -> - ?STMT_REBUILD(?BARRING_POST_SQL, - ?BARRING_POST_BINDS, - Pool, stmt_bar_post, State); - stmt_bar_put -> - ?STMT_REBUILD(?BARRING_PUT_SQL, - ?BARRING_PUT_BINDS, - Pool, stmt_bar_put, State) - end}; - _ -> - case lists:member(Code, CloseSessionsErrors) of - true -> - ?SError("pool restart ORA-~p ~s", [Code, Message]), - lists:map( - fun(Ip) -> - case catch ranch:set_max_connections( - {Ip, Port}, 0) of - ok -> ok; - Error -> - ?SError("stop accept ~p on port ~p : ~p", - [Ip, Port, Error]) - end - end, Listeners), - erlocipool:del(Pool), - case init_resources(State#state.resource, State) of - {ok, State1} -> - lists:map( - fun(Ip) -> - case catch ranch:set_max_connections( - {Ip, Port}, ?SERVICE_MAXCONNS(SName)) of - ok -> ok; - Error -> - ?SError("start accept ~p on port ~p : ~p", - [Ip, Port, Error]) - end - end, Listeners), - {noreply, State1}; - Error -> {stop, {resource, Error}, State} - end; - _ -> - ?SError("Unhandled ~p : ~s", [Code, Message]), - {noreply, State} - end - end; -handle_info(count_request, #state{dynstate = Ds} = State) -> - NewDs = Ds#{req => maps:get(req, Ds, 0) + 1}, - {noreply, State#state{dynstate = NewDs}}; -handle_info({count_error, HttpRInt}, - #state{dynstate = #{error := Error} = Ds} = State) -> - NewDs = Ds#{error => Error#{HttpRInt => maps:get(HttpRInt, Error, 0) + 1}}, - {noreply, State#state{dynstate = NewDs}}; -handle_info(stop, State) -> - {stop, normal, State}; -handle_info(Request, State) -> - ?SWarn("Unsupported handle_info ~p", [Request]), - {noreply, State}. 
- -terminate(Reason, #state{listeners = Listeners, port = Port, pool = Pool}) -> - erlocipool:del(Pool), - stop_listeners(Reason, Listeners, Port), - ?SInfo("terminate ~p", [Reason]). - -code_change(OldVsn, State, Extra) -> - ?SInfo("code_change ~p: ~p", [OldVsn, Extra]), - {ok, State}. - -format_status(Opt, [PDict, State]) -> - ?SInfo("format_status ~p: ~p", [Opt, PDict]), - State. - -db_call({StmtType, Stmt}, Params) -> - self() ! count_request, - case Stmt:exec_stmt([list_to_tuple([?RESP_BUFFER|Params])]) of - {executed, _, [{_,<<"{\"errorCode\":",_:8,HttpR:3/binary,_/binary>>=Resp}]} -> - HttpRInt = binary_to_integer(HttpR), - if HttpRInt >= 400 andalso HttpRInt < 500 -> - ?SInfo("~p: ~s", [HttpRInt, Resp]); - HttpRInt >= 500 -> - ?SWarn("~p: ~s", [HttpRInt, Resp]) - end, - self() ! {count_error, HttpRInt}, - {HttpRInt, Resp}; - {executed, _, [{_,Resp}]} -> {200, Resp}; - {error, {Code, Message}} -> - self() ! {error, StmtType, Code, Message}, - {500, - #{errorCode => 2500, - errorMessage => <<"Internal Server Error">>, - errorDetails => Message}}; - {error, Reason} -> - ?SError("~p (~p) : ~p", [StmtType, Params, Reason]), - self() ! stop, - {500, - #{errorCode => 2500, - errorMessage => <<"Internal Server Error">>, - errorDetails => <<"See server error logs for details">>}} - end. 
- -init_interface(#{baseUrl := BaseUrl, commonWhitelist := CWhiteList, - listenerAddresses := LAddresses, tenants := Tenants, - port := Port, ssl := #{cert := Cert, key := Key}} = Intf, - #state{name = SName} = State) -> - MaxAcceptors = maps:get(max_acceptors, Intf, ?SERVICE_MAXACCEPTORS(SName)), - MaxConnections = maps:get(max_connections, Intf, ?SERVICE_MAXCONNS(SName)), - Opts = #{resource => self(), whitelist => maps:keys(CWhiteList), - tenants => Tenants, name => State#state.name}, - FilteredListenerIps = local_ips(LAddresses), - Base = - case hd(BaseUrl) of - $/ -> BaseUrl; - _ -> "/" ++ BaseUrl - end, - try - lists:map(fun(Ip) -> - Dispatch = - cowboy_router:compile( - [{'_', - [{Base++"/swagger/", ?MODULE, {swagger, Base, SName}}, - {Base++"/swagger/brand.json", cowboy_static, {priv_file, dperl, "brand.json"}}, - {Base++"/swagger/swisscom.png", cowboy_static, {priv_file, dperl, "swisscom.png"}}, - {Base++"/swagger/[...]", cowboy_static, {swagger_static, SName}}, - {Base++"/" ++ ?SERVICE_PROBE_URL(SName), ?MODULE, {'$probe', SName}}, - {Base, ?MODULE, {spec, SName}}, - {Base++"/:class/:msisdn", [{class, fun class_constraint/2}], ?MODULE, Opts}, - {Base++"/:msisdn", ?MODULE, Opts} - ]}] - ), - TransOpts = [{ip, Ip}, {port, Port}, - {num_acceptors, MaxAcceptors}, - {max_connections, MaxConnections}, - {versions, ['tlsv1.2','tlsv1.1',tlsv1]} - | imem_server:get_cert_key(Cert) - ++ imem_server:get_cert_key(Key)], - ProtoOpts = #{env => #{dispatch => Dispatch}, - middlewares => [cowboy_router, ?MODULE, cowboy_handler], - stream_handlers => [cowboy_compress_h, cowboy_stream_h]}, - {ok, P} = cowboy:start_tls({Ip, Port}, TransOpts, ProtoOpts), - ?SInfo("[~p] Activated https://~s:~p~s", - [P, inet:ntoa(Ip), Port, Base]) - end, FilteredListenerIps), - {ok, State#state{listeners = FilteredListenerIps, port = Port}} - catch - error:{badmatch,{error,{already_started,_}}} = Error:Stacktrace -> - ?SError("error:~p~n~p", [Error, Stacktrace]), - stop_listeners(Error, 
FilteredListenerIps, Port), - init_interface(Intf, State); - Class:Error:Stacktrace -> - ?SError("~p:~p~n~p", [Class, Error, Stacktrace]), - {error, Error} - end. - -class_constraint(format_error, Value) -> io_lib:format("The class ~p is not an valid.", [Value]); -class_constraint(_Type, <<"topstopper">>) -> {ok, topstopper}; -class_constraint(_Type, <<"barring">>) -> {ok, barring}; -class_constraint(_Type, _) -> {error, not_valid}. - -stop_listeners(Reason, Listerns, Port) -> - lists:map( - fun(Ip) -> - case catch cowboy:stop_listener({Ip, Port}) of - ok -> ok; - Error -> - ?SError("[~p] stopping listener ~p on port ~p : ~p", - [Reason, Ip, Port, Error]) - end - end, Listerns). - -local_ips(ListenerAddresses) -> - IntfIps = dderl:local_ipv4s(), - maps:fold( - fun({_, _, _, _} = Ip, _, Acc) -> - case lists:member(Ip, IntfIps) of - true -> [Ip | Acc]; - false -> Acc - end; - (_, _, Acc) -> Acc - end, [], ListenerAddresses - ). - -init_resources(#{credential := #{user := _User, password := _Password}, - links := Links} = _Resources, - #state{active_link = ActiveLink} = _State) -> - #{opt := _Opts, tns := _TNS} = lists:nth(ActiveLink, Links), - {error, unimplemented}. 
- %% TODO : reimplement without erlocipool - %Options = dperl_dal:oci_opts(?ERLOCIPOOL_LOG_CB, Opts), - %Pool = dperl_dal:get_pool_name(Resources, State#state.name), - %case proplists:get_value(sess_min, Options) of - % 0 -> {error, invalid_dbconn_pool_size}; - % _ -> - % case erlocipool:new(Pool, TNS, User, Password, Options) of - % {ok, _PoolPid} -> - % try - % CloseStatementErrors = maps:get(close_statement, - % Resources, []), - % CloseSessionsErrors = maps:get(close_session, - % Resources, []), - % if is_list(CloseStatementErrors) andalso - % is_list(CloseSessionsErrors) -> - % {ok, - % State#state{ - % stmt_profile_del - % = create_stmt(Pool, ?PROFILE_DELETE_SQL, - % ?PROFILE_DELETE_TOPSTOPPER_POST_BINDS), - % stmt_ts_get - % = create_stmt(Pool, ?TOPSTOPPER_GET_SQL, - % ?TOPSTOPPER_BARRING_GET_BINDS), - % stmt_ts_post - % = create_stmt(Pool, ?TOPSTOPPER_POST_SQL, - % ?PROFILE_DELETE_TOPSTOPPER_POST_BINDS), - % stmt_ts_put - % = create_stmt(Pool, ?TOPSTOPPER_PUT_SQL, - % ?TOPSTOPPER_PUT_BINDS), - % stmt_ts_reset - % = create_stmt(Pool, ?TOPSTOPPER_RESET_SQL, - % ?TOPSTOPPER_RESET_REVERT_BINDS), - % stmt_ts_revert - % = create_stmt(Pool, ?TOPSTOPPER_REVERT_SQL, - % ?TOPSTOPPER_RESET_REVERT_BINDS), - % stmt_bar_get - % = create_stmt(Pool, ?BARRING_GET_SQL, - % ?TOPSTOPPER_BARRING_GET_BINDS), - % stmt_bar_post - % = create_stmt(Pool, ?BARRING_POST_SQL, - % ?BARRING_POST_BINDS), - % stmt_bar_put - % = create_stmt(Pool, ?BARRING_PUT_SQL, - % ?BARRING_PUT_BINDS), - % pool = Pool, close_statement = CloseStatementErrors, - % close_session = CloseSessionsErrors}}; - % true -> - % {badarg, [close_statement, close_session]} - % end - % catch - % Class:Error -> - % ?SError("create statements failed : ~p. 
Deleing pool.", [{Class, Error}]), - % erlocipool:del(Pool), - % {Class, Error} - % end; - % {error, {already_started,_} = Error} -> - % ?SError("Pool ~p exists, restarting...", [Pool]), - % erlocipool:del(Pool), - % {error, Error}; - % {error, Error} -> - % erlocipool:del(Pool), - % init_resources( - % Resources, - % State#state{active_link = - % if length(Links) > ActiveLink -> - % ?SWarn("Pool ~p start : ", - % [Pool, Error]), - % ActiveLink + 1; - % true -> - % ?SError("Pool ~p start : ", - % [Pool, Error]), - % 1 - % end}) - % end - %end. - --spec create_stmt(atom(), binary(), list()) -> tuple(). -create_stmt(_Pool, _Sql, _Binds) -> - {error, unimplemented}. - %% TODO - % ?OciStmt(Pool, Sql, Binds, Stmt), - % Stmt. - -%% -%% Cowboy REST resource -%% - --define(SERVER, "dperl AAA"). --define(SPEC_FILE, "aaa.json"). --include_lib("dderl/src/dderl_rest.hrl"). - --define(E2400, - #{errorCode => 2400, - errorMessage => <<"Missing body">>, - errorDetails => <<"Missing request payload">>}). --define(E1404, - #{errorCode => 1404, - errorMessage => <<"Not Found">>, - errorDetails => <<"Ressource not found, no AAA-Profile exists." - " Consider creating a default profile with" - " POST">>}). --define(E1405, - #{errorCode => 1405, - errorMessage => <<"Method Not Allowed">>, - errorDetails => <<"HTTP method isn't allowed on this resource">>}). - --define(E1403, - #{errorCode => 1403, - errorMessage => <<"Forbidden">>, - errorDetails => <<"No access to the requested service">>}). - --define(JSON(__BODY), imem_json:encode(__BODY)). - -% applying Swagger whitelist through middleware -execute(Req, Env) -> - case maps:get(handler_opts, Env, none) of - {swagger_static, SName} -> - apply_swagger_whitelist( - Req, SName, Env#{handler_opts => {priv_dir, dderl, "public/swagger"}}); - {swagger, _, SName} -> apply_swagger_whitelist(Req, SName, Env); - {spec, SName} -> apply_swagger_whitelist(Req, SName, Env); - _ -> {ok, Req, Env} - end. 
- -apply_swagger_whitelist(Req, SName, Env) -> - {Ip, _Port} = cowboy_req:peer(Req), - case ?SWAGGER_WHITELIST(SName) of - #{Ip := _} -> {ok, Req, Env}; - WL when map_size(WL) == 0 -> {ok, Req, Env}; - _ -> - Req1 = cowboy_req:reply(403, ?REPLY_JSON_HEADERS, - ?JSON(?E1403), Req), - {stop, Req1} - end. - -init(Req, {'$probe', SName}) -> - {Code, Resp} = ?SERVICE_PROBE_RESP(SName), - Req1 = cowboy_req:reply(Code, ?REPLY_HEADERS, Resp, Req), - {ok, Req1, undefined}; -init(Req, {swagger, Base, _SName}) -> - Url = iolist_to_binary(cowboy_req:uri(Req)), - LastAt = byte_size(Url) - 1, - Req1 = - cowboy_req:reply( - 302, #{<<"cache-control">> => <<"no-cache">>, - <<"pragma">> => <<"no-cache">>, - <<"location">> => - list_to_binary( - [Url, case Url of - <<_:LastAt/binary, "/">> -> ""; - _ -> "/" - end, - "index.html?url="++ Base])}, - <<"Redirecting...">>, Req), - {ok, Req1, #state{}}; -init(Req, {spec, _SName}) -> - Req1 = - case cowboy_req:method(Req) of - <<"GET">> -> - {ok, Content} = file:read_file( - filename:join(dderl:priv_dir(dperl), - ?SPEC_FILE)), - cowboy_req:reply(200, ?REPLY_JSON_SPEC_HEADERS, Content, Req); - <<"OPTIONS">> -> - ACRHS = cowboy_req:header(<<"access-control-request-headers">>, Req), - cowboy_req:reply(200, - maps:merge(#{<<"allow">> => <<"GET,OPTIONS">>, - <<"access-control-allow-headers">> => ACRHS}, - ?REPLY_OPT_HEADERS), <<>>, Req); - Method-> - ?Error("~p not supported", [Method]), - cowboy_req:reply(405, ?REPLY_JSON_HEADERS, ?JSON(?E1405), Req) - end, - {ok, Req1, #state{}}; -init(Req0, #{whitelist := CWhiteList, tenants := Tenants, - name := ServiceName} = Opts)-> - put(name, ServiceName), - Req = Req0#{req_time => os:timestamp()}, - {Ip, _Port} = cowboy_req:peer(Req), - IpStr = inet:ntoa(Ip), - case cowboy_req:parse_header(<<"authorization">>, Req) of - {basic, Tenant, Password} -> - case Tenants of % user:password authorization check - #{Tenant := #{password := Password, permissions := Permissions, - whitelist := TWhiteLists}} -> - % 
whitelists check - case {is_ip_allowed(Ip, maps:keys(TWhiteLists)), - is_ip_allowed(Ip, CWhiteList)} of - {false, false} -> - ?SError("~s (~s) is not in tenants' whitelist", - [IpStr, Tenant]), - unauthorized(Req); - _ -> - Class = cowboy_req:binding(class, Req, <<>>), - Msisdn = cowboy_req:binding(msisdn, Req, <<>>), - Op = cowboy_req:method(Req), - Operation = get_operation(Class, Op, Req), - % operation permission check - case Permissions of - #{Operation := _} -> - push_request(Operation, Msisdn, Tenant, Req, Opts); - _ -> - ?SError("~s (~s) operation ~p not authorized", - [IpStr, Tenant, Operation]), - unauthorized(Req) - end - end; - Tenants -> - ?SError("~s (~s:~s) is not configured in tenants", - [IpStr, Tenant, Password]), - unauthorized(Req) - end; - Auth -> - ?SError("~s provided unsupported or bad authorization ~p", [IpStr, Auth]), - unauthorized(Req) - end. - --spec is_ip_allowed(tuple(), list()) -> true | false. -is_ip_allowed(_Ip, []) -> true; -is_ip_allowed(Ip, WhiteList) -> lists:member(Ip, WhiteList). - -unauthorized(Req) -> - Req1 = cowboy_req:reply(403, ?REPLY_JSON_HEADERS, ?JSON(?E1403), Req), - {ok, Req1, undefined}. - -get_operation(<<>>, <<"DELETE">>, _Req) -> {profile, delete}; -get_operation(barring, <<"GET">>, _Req) -> {barring, get}; -get_operation(barring, <<"POST">>, _Req) -> {barring, post}; -get_operation(barring, <<"PUT">>, _Req) -> {barring, put}; -get_operation(topstopper, <<"GET">>, _Req) -> {topstopper, get}; -get_operation(topstopper, <<"PATCH">>, Req) -> - case cowboy_req:match_qs([{action, [], none}], Req) of - #{action := <<"reset">>} -> {topstopper, reset}; - #{action := <<"revert">>} -> {topstopper, revert}; - #{action := Other} -> {topstopper, Other} - end; -get_operation(topstopper, <<"POST">>, _Req) -> {topstopper, post}; -get_operation(topstopper, <<"PUT">>, _Req) -> {topstopper, put}; -get_operation(Class, Op, _) -> {Class, Op}. 
- -push_request({_, Op} = Operation, Msisdn, Tenant, Req0, #{resource := Service} = Opts) -> - CastReq = #{reply => self(), operation => Operation, - msisdn => Msisdn, tenant => Tenant}, - Req = Req0#{db_call => os:timestamp()}, - case cowboy_req:has_body(Req) of - false when Op == put; Op == post; Op == patch; Op == delete -> - Req1 = cowboy_req:reply(400, ?REPLY_JSON_HEADERS, ?JSON(?E2400), Req), - {ok, Req1, undefined}; - false when Op == get -> - ok = gen_server:cast(Service, CastReq), - {cowboy_loop, Req, Opts, hibernate}; - true -> - {ok, Body, Req1} = cowboy_req:read_body(Req), - case catch imem_json:decode(Body, [return_maps]) of - {'EXIT', Error} -> - ?SError("~p malformed with ~s : ~p", [Operation, Body, Error]), - Req2 = cowboy_req:reply(400, ?REPLY_JSON_HEADERS, ?JSON(?E2400), Req1), - {ok, Req2, undefined}; - BodyMap -> - ok = gen_server:cast(Service, CastReq#{data => BodyMap}), - {cowboy_loop, Req1, Opts, hibernate} - end; - HasBody -> - ?SError("~p with body ~p is not supported", [Operation, HasBody]), - Req1 = cowboy_req:reply(400, ?REPLY_JSON_HEADERS, ?JSON(?E2400), Req), - {ok, Req1, undefined} - end. 
- -info({reply, bad_req}, Req, State) -> info({reply, {400, <<>>}}, Req, State); -info({reply, not_found}, Req, State) -> - info({reply, {404, ?JSON(?E1404)}}, Req, State); -info({reply, {Code, Body}}, Req, State) when is_integer(Code), is_map(Body) -> - info({reply, {Code, imem_json:encode(Body)}}, Req, State); -info({reply, {Code, Body}}, Req, State) when is_integer(Code), is_binary(Body) -> - Req1 = cowboy_req:reply(Code, ?REPLY_JSON_HEADERS, Body, Req), - ReqTime = maps:get(req_time, Req1), - DbCall = maps:get(db_call, Req1), - Now = os:timestamp(), - Total = timer:now_diff(Now, ReqTime), - if Total > 0 -> - Op = cowboy_req:method(Req), - Class = cowboy_req:binding(class, Req, <<>>), - Msisdn = cowboy_req:binding(msisdn, Req, <<>>), - {Ip, Port} = cowboy_req:peer(Req), - IpStr = inet:ntoa(Ip), - if is_tuple(DbCall) -> - ?SDebug("~s:~p ~s ~p ~s : TotalTime (micros) ~p = ~p (ReqTime) + ~p (DBTime)", - [IpStr, Port, Op, Class, Msisdn, Total, - timer:now_diff(DbCall, ReqTime), - timer:now_diff(Now, DbCall)]); - true -> - ?SDebug("~s:~p ~s ~p ~s : TotalTime ~p micros", - [IpStr, Port, Op, Class, Msisdn, Total]) - end; - true -> ok - end, - {stop, Req1, State}. diff --git a/src/dperl/services/dperl_service_oracle.hrl b/src/dperl/services/dperl_service_oracle.hrl deleted file mode 100644 index e31ffa6d..00000000 --- a/src/dperl/services/dperl_service_oracle.hrl +++ /dev/null @@ -1,215 +0,0 @@ --ifndef(_dperl_ora_). --define(_dperl_ora_, true). - --include_lib("dperl/dperl.hrl"). - --define(MAX_COMMENT_LENGTH, 200). --define(MAX_REQUESTOR_LENGTH, 20). - --define(RESP_BUFFER, list_to_binary(lists:duplicate(1024, 0))). - --define(SERVICE_MAXACCEPTORS(__SERVICE_NAME), - ?GET_CONFIG(maxNumberOfAcceptors, [__SERVICE_NAME], 100, - "Maximum number of TCP acceptors") - ). - --define(SERVICE_MAXCONNS(__SERVICE_NAME), - ?GET_CONFIG(maxNumberOfSockets, [__SERVICE_NAME], 5000, - "Maximum number of simulteneous connections") - ). 
- --define(SERVICE_PROBE_URL(__SERVICE_NAME), - ?GET_CONFIG(probeUrl, [__SERVICE_NAME], "/probe.html", - "Defines the url of the probe for the load balancer") - ). - --define(SERVICE_PROBE_RESP(__SERVICE_NAME), - ?GET_CONFIG(probeResp, [__SERVICE_NAME], - {200, - <<"" - "Service is alive" - "">>}, - "Response given to the load balancer when the probeUrl" - " is requested") - ). - --define(SERVICE_UPDATE_PERIOD(__SERVICE_NAME), - ?GET_CONFIG(serviceDynUpdatePeriod, [__SERVICE_NAME], 2000, - "Delay in millisecond between a service updates its DYN" - " info") - ). - --define(SERVICE_STATUS_RESET_PERIOD(__SERVICE_NAME), - ?GET_CONFIG(serviceDynResetPeriod, [__SERVICE_NAME], 3600 * 1000, - "Delay in millisecond between a service resets its DYN" - " info") - ). - --define(SERVICE_ACTIVE_THRESHOLD(__SERVICE_NAME), - ?GET_CONFIG(serviceStatusActiveThreshold, [__SERVICE_NAME], 1, - "Request count threshhold beyond which service is marked" - " as active in DYN") - ). - --define(SERVICE_OVERLOAD_THRESHOLD(__SERVICE_NAME), - ?GET_CONFIG(serviceStatusOverloadThreshold, [__SERVICE_NAME], 100, - "Request count threshhold beyond which service is marked" - " as overloaded in DYN") - ). - --define(SWAGGER_WHITELIST(__SERVICE_NAME), - ?GET_CONFIG(swaggerWhitelist, [__SERVICE_NAME], #{}, - "Whitelist for SwaggerClient Access")). - -%% sqls - -%% common sqls and binds --define(PROFILE_DELETE_SQL, <<" -BEGIN - :SQLT_CHR_OUT_RESULT := pkg_ora_dperl.aaa_profile_msisdn_delete( - ProvisioningTenant=>:SQLT_CHR_APP, - Msisdn=>:SQLT_CHR_MSISDN, - Requestor=>:SQLT_CHR_REQUESTOR, - Remark=>:SQLT_CHR_REMARK - ); -END; -">>). - -%% topstopper sqls and binds --define(TOPSTOPPER_GET_SQL, <<" -BEGIN - :SQLT_CHR_OUT_RESULT := pkg_ora_dperl.aaa_ts_msisdn_get( - ProvisioningTenant=>:SQLT_CHR_APP, - Msisdn=>:SQLT_CHR_MSISDN); -END; -">>). 
- --define(TOPSTOPPER_POST_SQL, <<" -BEGIN - :SQLT_CHR_OUT_RESULT := pkg_ora_dperl.aaa_ts_msisdn_post( - ProvisioningTenant=>:SQLT_CHR_APP, - Msisdn=>:SQLT_CHR_MSISDN, - Requestor=>:SQLT_CHR_REQUESTOR, - Remark=>:SQLT_CHR_REMARK - ); -END; -">>). - --define(PROFILE_DELETE_TOPSTOPPER_POST_BINDS, - [{<<":SQLT_CHR_OUT_RESULT">>, out, 'SQLT_CHR'}, - {<<":SQLT_CHR_APP">>, in, 'SQLT_CHR'}, - {<<":SQLT_CHR_MSISDN">>, in, 'SQLT_CHR'}, - {<<":SQLT_CHR_REQUESTOR">>, in, 'SQLT_CHR'}, - {<<":SQLT_CHR_REMARK">>, in, 'SQLT_CHR'}]). - --define(TOPSTOPPER_PUT_SQL, <<" -BEGIN - :SQLT_CHR_OUT_RESULT := pkg_ora_dperl.aaa_ts_msisdn_put( - ProvisioningTenant=>:SQLT_CHR_APP, - Msisdn=>:SQLT_CHR_MSISDN, - BarringType=>:SQLT_CHR_BARRINGTYPE, - TopStopLimit=>:SQLT_VNU_TOPSTOPLIMIT, - Requestor=>:SQLT_CHR_REQUESTOR, - Remark=>:SQLT_CHR_REMARK - ); -END; -">>). - --define(TOPSTOPPER_PUT_BINDS, - [{<<":SQLT_CHR_OUT_RESULT">>, out, 'SQLT_CHR'}, - {<<":SQLT_CHR_APP">>, in, 'SQLT_CHR'}, - {<<":SQLT_CHR_MSISDN">>, in, 'SQLT_CHR'}, - {<<":SQLT_CHR_BARRINGTYPE">>, in, 'SQLT_CHR'}, - {<<":SQLT_VNU_TOPSTOPLIMIT">>, in, 'SQLT_VNU'}, - {<<":SQLT_CHR_REQUESTOR">>, in, 'SQLT_CHR'}, - {<<":SQLT_CHR_REMARK">>, in, 'SQLT_CHR'}]). - --define(TOPSTOPPER_RESET_SQL, <<" -BEGIN - :SQLT_CHR_OUT_RESULT := pkg_ora_dperl.aaa_ts_msisdn_reset( - ProvisioningTenant=>:SQLT_CHR_APP, - Msisdn=>:SQLT_CHR_MSISDN, - BarringType=>:SQLT_CHR_BARRINGTYPE, - Requestor=>:SQLT_CHR_REQUESTOR, - Remark=>:SQLT_CHR_REMARK - ); -END; -">>). - - --define(TOPSTOPPER_REVERT_SQL, <<" -BEGIN - :SQLT_CHR_OUT_RESULT := pkg_ora_dperl.aaa_ts_msisdn_revert( - ProvisioningTenant=>:SQLT_CHR_APP, - Msisdn=>:SQLT_CHR_MSISDN, - BarringType=>:SQLT_CHR_BARRINGTYPE, - Requestor=>:SQLT_CHR_REQUESTOR, - Remark=>:SQLT_CHR_REMARK - ); -END; -">>). 
- --define(TOPSTOPPER_RESET_REVERT_BINDS, - [{<<":SQLT_CHR_OUT_RESULT">>, out, 'SQLT_CHR'}, - {<<":SQLT_CHR_APP">>, in, 'SQLT_CHR'}, - {<<":SQLT_CHR_MSISDN">>, in, 'SQLT_CHR'}, - {<<":SQLT_CHR_BARRINGTYPE">>, in, 'SQLT_CHR'}, - {<<":SQLT_CHR_REQUESTOR">>, in, 'SQLT_CHR'}, - {<<":SQLT_CHR_REMARK">>, in, 'SQLT_CHR'}]). - -%% barring sqls and binds --define(BARRING_GET_SQL, <<" -BEGIN - :SQLT_CHR_OUT_RESULT := pkg_ora_dperl.aaa_barring_msisdn_get( - ProvisioningTenant=>:SQLT_CHR_APP, - Msisdn=>:SQLT_CHR_MSISDN - ); -END; -">>). - --define(TOPSTOPPER_BARRING_GET_BINDS, - [{<<":SQLT_CHR_OUT_RESULT">>, out, 'SQLT_CHR'}, - {<<":SQLT_CHR_APP">>, in, 'SQLT_CHR'}, - {<<":SQLT_CHR_MSISDN">>, in, 'SQLT_CHR'}]). - --define(BARRING_POST_SQL, <<" -BEGIN - :SQLT_CHR_OUT_RESULT := pkg_ora_dperl.aaa_barring_msisdn_post( - ProvisioningTenant=>:SQLT_CHR_APP, - Msisdn=>:SQLT_CHR_MSISDN, - Requestor=>:SQLT_CHR_REQUESTOR, - Remark=>:SQLT_CHR_REMARK - ); -END; -">>). - --define(BARRING_POST_BINDS, - [{<<":SQLT_CHR_OUT_RESULT">>, out, 'SQLT_CHR'}, - {<<":SQLT_CHR_APP">>, in, 'SQLT_CHR'}, - {<<":SQLT_CHR_MSISDN">>, in, 'SQLT_CHR'}, - {<<":SQLT_CHR_REMARK">>, in, 'SQLT_CHR'}, - {<<":SQLT_CHR_REQUESTOR">>, in, 'SQLT_CHR'}]). - --define(BARRING_PUT_SQL, <<" -BEGIN - :SQLT_CHR_OUT_RESULT := pkg_ora_dperl.aaa_barring_msisdn_put( - ProvisioningTenant=>:SQLT_CHR_APP, - Msisdn=>:SQLT_CHR_MSISDN, - BarringType=>:SQLT_CHR_BARRINGTYPE, - Requestor=>:SQLT_CHR_REQUESTOR, - BarringLevel=>:SQLT_INT_LEVEL, - Remark=>:SQLT_CHR_REMARK - ); -END; -">>). - --define(BARRING_PUT_BINDS, - [{<<":SQLT_CHR_OUT_RESULT">>, out, 'SQLT_CHR'}, - {<<":SQLT_CHR_APP">>, in, 'SQLT_CHR'}, - {<<":SQLT_CHR_MSISDN">>, in, 'SQLT_CHR'}, - {<<":SQLT_CHR_BARRINGTYPE">>, in, 'SQLT_CHR'}, - {<<":SQLT_INT_LEVEL">>, in, 'SQLT_INT'}, - {<<":SQLT_CHR_REMARK">>, in, 'SQLT_CHR'}, - {<<":SQLT_CHR_REQUESTOR">>, in, 'SQLT_CHR'}]). - --endif. 
%_dperl_ora_ From 8af0facb7b5c2fdad17976caa37a3acda45c067a Mon Sep 17 00:00:00 2001 From: shamis Date: Tue, 28 Apr 2020 16:27:22 +0200 Subject: [PATCH 09/72] changed cifs to local --- src/dperl/jobs/dperl_file_copy.erl | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/src/dperl/jobs/dperl_file_copy.erl b/src/dperl/jobs/dperl_file_copy.erl index 7f1dd027..c05aa0a8 100644 --- a/src/dperl/jobs/dperl_file_copy.erl +++ b/src/dperl/jobs/dperl_file_copy.erl @@ -383,7 +383,6 @@ parse_args(#{default := _} = Args) -> Error end. -process_default(#{proto := cifs, root := _} = Default) -> {ok, Default}; process_default(#{proto := sftp, host := _, user := _, password := _, root := _} = Default) -> {ok, Default#{opts => maps:get(opts, Default, []), @@ -392,6 +391,7 @@ process_default(#{proto := sftp, host := _, user := _, key := _, root := _} = Default) -> {ok, Default#{port => maps:get(port, Default, ?SSH_DEFAULT_PORT), opts => maps:get(opts, Default, [])}}; +process_default(#{proto := local, root := _} = Default) -> {ok, Default}; process_default(Default) -> ?JError("Invalid Default : ~p", [Default]), {error, badarg}. @@ -724,14 +724,14 @@ all_test_() -> parse_args_invalid_dst_test() -> put(name, <<"test">>), - ?assertEqual({error, badarg}, parse_args(#{status_dir => s, status_path => s, status_extra => e}, #{default => #{proto => cifs, root => r, mask => ""}}, #{default => #{}}, s)). + ?assertEqual({error, badarg}, parse_args(#{status_dir => s, status_path => s, status_extra => e}, #{default => #{proto => local, root => r, mask => ""}}, #{default => #{}}, s)). init_error_test() -> put(name, <<"test">>), ?assertEqual({stop, badarg}, init({#dperlJob{name = <<"test">>, args = #{status_dir => "test", status_path => "config", status_extra => "e"}, - dstArgs = #{default => #{proto => cifs, root => r, mask => ""}}, - srcArgs = #{default => #{proto => cifs, root => r, mask => ""}}}, #state{}})). 
+ dstArgs = #{default => #{proto => local, root => r, mask => ""}}, + srcArgs = #{default => #{proto => local, root => r, mask => ""}}}, #state{}})). dstFileName_test_() -> {inparallel, [ From d81004cd5ad6d3f80910e63047762ef1ecb75336 Mon Sep 17 00:00:00 2001 From: shamis Date: Tue, 28 Apr 2020 18:44:55 +0200 Subject: [PATCH 10/72] added file copy ct test --- rebar.config | 4 + test/dperl_file_copy_SUITE.erl | 180 +++++++++++++++++++++++++++++++++ 2 files changed, 184 insertions(+) create mode 100644 test/dperl_file_copy_SUITE.erl diff --git a/rebar.config b/rebar.config index db60ffa7..db2836d7 100644 --- a/rebar.config +++ b/rebar.config @@ -110,5 +110,9 @@ {pre_hooks, [{release, "escript inject_git_rev.escript prod"}]}, {post_hooks, [{release, "bash ./post_release.sh"}]}, {relx, [{dev_mode, false}, {include_src, false}]} + ]}, + {test, [ + {deps, [meck]}, + {dist_node, [{setcookie, 'testcookie'}, {sname, 'testnode'}]} ]} ]}. diff --git a/test/dperl_file_copy_SUITE.erl b/test/dperl_file_copy_SUITE.erl new file mode 100644 index 00000000..994ebe02 --- /dev/null +++ b/test/dperl_file_copy_SUITE.erl @@ -0,0 +1,180 @@ +-module(dperl_file_copy_SUITE). + +-include_lib("../src/dperl/dperl.hrl"). + +-include_lib("common_test/include/ct.hrl"). + +-export([all/0, init_per_suite/1, end_per_suite/1]). + +-export([test/1]). + +all() -> + ct:pal(info, ?MAX_IMPORTANCE, ?MODULE_STRING ++ ":all/0 - Start ===>~n", []), + [test]. 
+ +init_per_suite(Config) -> + file:make_dir("Data"), + file:make_dir("Data/test1"), + file:make_dir("Data/test2"), + file:make_dir("Data/test3"), + file:make_dir("Data/test1/backup"), + file:make_dir("Data/test2/backup"), + file:make_dir("Data/test3/backup"), + file:make_dir("Data/SMSC_CUC"), + file:make_dir("Data/SMSC_CUC/smch40"), + file:make_dir("Data/SMSC_CUC/smch40/tmp"), + file:make_dir("Data/SMSC_CUC/smch40/test1"), + file:make_dir("Data/SMSC_CUC/smch40/test2"), + file:make_dir("Data/SMSC_CUC/smch40/test3"), + file:make_dir("Programs"), + file:make_dir("Programs/SFHSmscCUC"), + ct:pal(info, ?MAX_IMPORTANCE, ?MODULE_STRING ++ "Starting ensure dperl ===>~n", []), + application:load(dderl), + application:set_env(dderl, port, 8552), + application:ensure_all_started(dderl), + Config. + +end_per_suite(_Config) -> + application:stop(dperl). + +test(_Config) -> + {ok, Cwd} = file:get_cwd(), + Path = "Data/SMSC_CUC/smch40", + Data = "Data", + Job = #dperlJob{name = <<"SFHSmscCUC1">>,module = dperl_file_copy, + args = #{cleanup => false,refresh => false,sync => true, debug => true, + status_dir => "SFHSmscCUC", status_extra => "WORKING\n600\n900", + status_path => filename:join(Cwd, "Programs")}, + srcArgs = #{default => #{root => filename:join(Cwd, "Data"), + proto => local, mask => + "CUCA_vimszmos-smin?1_1_########_??????????????.CSV"}}, + dstArgs = #{default => #{proto => local, tmp_dir => "tmp", + root => filename:join(Cwd, Path)}}, + enabled = true,running = true,plan = at_most_once, + nodes = [], opts = []}, + ct:pal(info, ?MAX_IMPORTANCE, ?MODULE_STRING ++ "About to Start SFH SMSCUC job :)~n", []), + dperl_dal:write(dperlJob, Job), + ct:pal(info, ?MAX_IMPORTANCE, ?MODULE_STRING ++ "Starting SFH SMSCUC job :)~n", []), + ct:sleep({seconds, 3}), + File1 = "CUCA_vimszmos-smin41_1_00000868_20181102205842.CSV", + Header = <<"SubmitTime\tSubmitMicros\tSubmitGti\tSubmitApp\tOrigAddress\tOrigAddressTon\tOrigAddressNpi\tOrigCharset\t" + 
"OrigGti\tOrigSca\tOrigImsi\tOrigEsmeId\tOrigIp\tOrigBillingId\tRecipAddress\tRecipAddressTon\t" + "RecipAddressNpi\tRecipImsi\tDeliverTime\tDeliverMicros\tDeliverAttempt\tDeliverAddress\tDeliverAddressTon\t" + "DeliverAddressNpi\tDeliverGti\tDeliverImsi\tDeliverEsmeId\tDeliverIp\tMsgReference\tMsgPid\tMsgReqType\t" + "MsgScheduleTime\tMsgExpiryTime\tMsgStatus\tSegId\tSegCount\tSegLength\tErrorType\tErrorCode\tDiaResult\t" + "OrigDcs\tOrigMessageId\tOrigImei\tDeliverImei\tOrigPaniHeader\tDeliverPaniHeader\tDeliverMapGti\r\n">>, + Bin = <<"20181102205442+0100\t542176\t\t\tSwisscom\t5\t0\t0\t\t\t\t36530\t10.196.28.45/51096\t\t41794937801\t1\t1\t\t" + "20181102205442+0100\t0\t1\t41794937801\t1\t1\t\t\t\t\t1541188482542176498fe861O\t0\t4356\t\t20181102215440\t" + "9\t1\t1\t8\t2\t19733248\tNA\t4\t626090BD\t\t\t\t\t\r\n">>, + file:write_file(filename:join(Data, File1), [Header, Bin]), + ct:sleep({seconds, 5}), + meck:new(imem_file, [passthrough]), + meck:expect(imem_file, list_dir, 3, {error, enoent}), + File2 = "CUCA_vimszmos-smin41_1_00000869_20181102205852.CSV", + file:write_file(filename:join(Data, File2), [Header, [Bin || _ <- lists:seq(1, 2530)]]), + ct:sleep({seconds, 5}), + meck:delete(imem_file, list_dir, 3, false), + meck:expect(imem_file, open, + fun(#{path := Root}, File, Modes, _) -> + case re:run(File, "tmp") of + nomatch -> {error, enoent}; + _ -> file:open(filename:join(Root, File), lists:usort([binary | Modes])) + end + end), + ct:sleep({seconds, 6}), + meck:delete(imem_file, open, 4, false), + ct:sleep({seconds, 1}), + ok = file:write_file("Programs/SFHSmscCUC/SFHSmscCUC1.hst", string:join([File1 || _ <- lists:seq(1, 100)], "\n")), + meck:expect(imem_file, delete, 3, {error, enoent}), + ct:sleep({seconds, 8}), + ok = file:delete("Programs/SFHSmscCUC/SFHSmscCUC1.hst"), + meck:expect(imem_file, rename, 4, {error, enoent}), + ct:sleep({seconds, 8}), + meck:expect(imem_file, write, 4, {error, enoent}), + ct:sleep({seconds, 8}), + meck:expect(imem_file, 
write, 4, {error, closed}), + ct:sleep({seconds, 8}), + meck:expect(imem_file, pread, 5, {error, enoent}), + ct:sleep({seconds, 8}), + meck:expect(imem_file, pread, 5, {error, closed}), + ct:sleep({seconds, 8}), + meck:expect(imem_file, open, 4, {error, enoent}), + ct:sleep({seconds, 8}), + meck:unload(imem_file), + ct:sleep({seconds, 9}), + dperl_dal:disable(Job), + {ok, Files} = file:list_dir(Path), + true = lists:member(File1, Files), + true = lists:member(File2, Files), + {ok, StatusFiles} = file:list_dir("Programs/SFHSmscCUC"), + true = lists:member("ServiceActivityLog.sal", StatusFiles), + true = lists:member("SFHSmscCUC1.hst", StatusFiles), + meck:new(imem_compiler, [passthrough]), + meck:expect(imem_compiler, compile, fun(_) -> error(badfun) end), + dperl_dal:write(dperlJob, Job), + ct:sleep({seconds, 4}), + meck:unload(imem_compiler), + Job2 = Job#dperlJob{name = <<"SFHSmscCUC2">>, + srcArgs = #{default => #{root => filename:join(Cwd, "Data"), proto => local, + mask => "CUCA_vimszmos-smin?1_1_########_??????????????.CSV"}, + test1 => #{path => "test1", backup => "test1/backup"}, + <<"test2">> => #{path => "test2", backup => "test2/backup"}, + "test3" => #{path => "test3", backup => "test3/backup"}}, + dstArgs = #{default => #{proto => local, mask => [], root => filename:join(Cwd, Path)}, + test1 => #{path => "test1"}, + <<"test2">> => #{path => "test2"}, + "test3" => #{path => "test3"}}}, + dperl_dal:write(dperlJob, Job2), + ct:sleep({seconds, 3}), + file:write_file(filename:join([Data, "test1", File1]), []), + file:write_file(filename:join([Data, "test2", File1]), []), + file:write_file(filename:join([Data, "test1", File2]), [Header, Bin]), + file:write_file(filename:join([Data, "test2", File2]), [Header, Bin]), + ct:sleep({seconds, 4}), + meck:new(imem_file, [passthrough]), + meck:expect(imem_file, rename, + fun(_Ctx, Src, Dst, _) -> + case re:run(Dst, "backup") of + nomatch -> file:rename(Src, Dst); + _ -> {error, enoent} + end + end), + 
file:write_file(filename:join([Data, "test3", File1]), [Header, Bin]), + ct:sleep({seconds, 4}), + meck:expect(imem_file, write, 4, {error, closed}), + file:write_file(filename:join([Data, "test3", File2]), [Header, Bin]), + ct:sleep({seconds, 8}), + meck:expect(imem_file, open, 4, {error, closed}), + ct:sleep({seconds, 8}), + meck:delete(imem_file, open, 4, false), + meck:delete(imem_file, write, 4, false), + meck:expect(imem_file, rename, 4, {error, enoent}), + ct:sleep({seconds, 8}), + meck:unload(imem_file), + ct:sleep({seconds, 8}), + dperl_dal:disable(Job2), + dperl_dal:write(dperlJob, Job2), + ct:sleep({seconds, 2}), + Pid = global:whereis_name("SFHSmscCUC"), + exit(Pid, kill), + meck:new(inet, [unstick, passthrough]), + meck:expect(inet, gethostname, 0, {error, test}), + ct:sleep({seconds, 5}), + meck:unload(inet), + dperl_dal:disable(Job2), + {ok, Files1} = file:list_dir(filename:join(Path, "test1")), + true = lists:member(File1, Files1), + true = lists:member(File2, Files1), + {ok, Files2} = file:list_dir(filename:join(Path, "test2")), + true = lists:member(File1, Files2), + true = lists:member(File2, Files2), + {ok, BFiles1} = file:list_dir(filename:join([Data, "test1", "backup"])), + true = lists:member(File1, BFiles1), + true = lists:member(File2, BFiles1), + {ok, BFiles2} = file:list_dir(filename:join([Data, "test2", "backup"])), + true = lists:member(File1, BFiles2), + true = lists:member(File2, BFiles2), + {ok, StatusFiles2} = file:list_dir("Programs/SFHSmscCUC"), + true = lists:member("SFHSmscCUC2(test1).hst", StatusFiles2), + true = lists:member("SFHSmscCUC2(test2).hst", StatusFiles2), + ct:pal(info, ?MAX_IMPORTANCE, ?MODULE_STRING ++ "Disabled SFH SMSCUC job :)~n", []). 
From 3b3a7de72ba55ac9ca2de5a908bd6a751ca5e50c Mon Sep 17 00:00:00 2001 From: shamis Date: Wed, 29 Apr 2020 10:15:46 +0200 Subject: [PATCH 11/72] Review changes --- rebar.config | 4 +- src/ouraring_crawl.erl | 104 ----------------------------------------- test.escript | 28 ----------- 3 files changed, 2 insertions(+), 134 deletions(-) delete mode 100644 src/ouraring_crawl.erl delete mode 100644 test.escript diff --git a/rebar.config b/rebar.config index db2836d7..185a03f2 100644 --- a/rebar.config +++ b/rebar.config @@ -112,7 +112,7 @@ {relx, [{dev_mode, false}, {include_src, false}]} ]}, {test, [ - {deps, [meck]}, - {dist_node, [{setcookie, 'testcookie'}, {sname, 'testnode'}]} + {deps, [meck]} +% {dist_node, [{setcookie, 'testcookie'}, {sname, 'testnode'}]} ]} ]}. diff --git a/src/ouraring_crawl.erl b/src/ouraring_crawl.erl deleted file mode 100644 index 8f237caa..00000000 --- a/src/ouraring_crawl.erl +++ /dev/null @@ -1,104 +0,0 @@ --module(ouraring_crawl). - --export([run/0, run/1]). - --define(OAUTH2_URL_PREFIX, "https://cloud.ouraring.com"). --define(API_URL_PREFIX, "https://api.ouraring.com"). -run() -> - {ok, _} = application:ensure_all_started(inets), - {ok, _} = application:ensure_all_started(ssl), - run(#{ - client_id => "REMERNOADFFDIN3O", - client_secret => "HYJEW2WTIVIEXQNOTPDN7Y346GYSLNL3", - cb_uri => "https://127.0.0.1:8443/callback", - user_email => "max.ochsenbein@k2informatics.ch", - user_password => "cFMMax--XG$k2sa", - state => "any+value+as+state" - }). 
- -run(#{ - client_id := ClientId, - client_secret := ClientSecret, - cb_uri := CallbackUri, - user_email := Email, - user_password := Password, - state := State -}) -> - inets:stop(httpc, ?MODULE), - {ok, _} = inets:start(httpc, [{profile, ?MODULE}]), - ok = httpc:set_options([{cookies, enabled}], ?MODULE), - Url = ?OAUTH2_URL_PREFIX ++ "/oauth/authorize" - ++ "?response_type=code" - ++ "&client_id=" ++ ClientId - ++ "&redirect_uri=" ++ edoc_lib:escape_uri(CallbackUri) - ++ "&scope=email+personal+daily" - ++ "&state=" ++ State, - %io:format(">>>>>>>>>> authorize: ~s~n", [Url]), - {ok, {{"HTTP/1.1",302,"Found"}, RespHeader302, []}} = httpc:request(get, {Url, []}, [{autoredirect, false}], [], ?MODULE), - RedirectUri = ?OAUTH2_URL_PREFIX ++ proplists:get_value("location", RespHeader302), - % io:format(">>>>>>>>>> 302 Redirect: ~s~n", [RedirectUri]), - {ok, {{"HTTP/1.1",200,"OK"}, RespHeader, _Body}} = httpc:request(get, {RedirectUri, []}, [{autoredirect, false}], [], ?MODULE), - SetCookieHeader = proplists:get_value("set-cookie", RespHeader), - {match, [XRefCookie]} = re:run(SetCookieHeader, ".*_xsrf=(.*);.*", [{capture, [1], list}]), - {ok,{{"HTTP/1.1",302,"Found"}, RespHeader302_1, []}} = httpc:request( - post, { - RedirectUri, [], "application/x-www-form-urlencoded", - "_xsrf="++edoc_lib:escape_uri(XRefCookie) - ++ "&email=" ++ edoc_lib:escape_uri(Email) - ++ "&password=" ++ edoc_lib:escape_uri(Password) - }, [{autoredirect, false}], [], ?MODULE - ), - RedirectUri_1 = ?OAUTH2_URL_PREFIX ++ proplists:get_value("location", RespHeader302_1), - % io:format(">>>>>>>>>> 302 Redirect: ~s~n", [RedirectUri_1]), - {ok, {{"HTTP/1.1",200,"OK"}, _, _}} = httpc:request(get, {RedirectUri_1, []}, [{autoredirect, false}], [], ?MODULE), - {ok, {{"HTTP/1.1",302,"Found"}, RespHeader302_2, []}} = httpc:request( - post, { - RedirectUri_1, [], "application/x-www-form-urlencoded", - "_xsrf="++edoc_lib:escape_uri(XRefCookie) - ++ "&scope_email=on" - ++ "&scope_personal=on" - ++ 
"&scope_daily=on" - ++ "&allow=Accept" - }, [{autoredirect, false}], [], ?MODULE - ), - RedirectUri_2 = proplists:get_value("location", RespHeader302_2), - % io:format(">>>>>>>>>> 302 RedirectUri: ~s~n", [RedirectUri_2]), - #{query := QueryString} = uri_string:parse(RedirectUri_2), - #{"code" := Code} = maps:from_list(uri_string:dissect_query(QueryString)), - % io:format(">>>>>>>>>> Code: ~p~n", [Code]), - {ok, {{"HTTP/1.1",200,"OK"}, _, BodyJson}} = httpc:request( - post, { - ?OAUTH2_URL_PREFIX ++ "/oauth/token", [], "application/x-www-form-urlencoded", - "grant_type=authorization_code" - ++ "&code=" ++ Code - ++ "&redirect_uri=" ++ edoc_lib:escape_uri(CallbackUri) - ++ "&client_id=" ++ ClientId - ++ "&client_secret=" ++ ClientSecret - }, [{autoredirect, false}], [], ?MODULE - ), - #{<<"access_token">> := AccessToken} = Auth = jsx:decode(list_to_binary(BodyJson), [return_maps]), - io:format("Auth ~p~n", [Auth]), - io:format("-----~nUserInfo :~n~p~n-----~n", [userinfo(AccessToken)]), - io:format("-----~nSleep :~n~p~n-----~n", [sleep(AccessToken)]), - io:format("-----~nActivity :~n~p~n-- Activity --~n", [activity(AccessToken)]). - -userinfo(AccessToken) -> - {ok,{{"HTTP/1.1",200,"OK"}, _, UserInfoJson}} = httpc:request( - get, {?API_URL_PREFIX ++ "/v1/userinfo", [{"Authorization", "Bearer " ++ binary_to_list(AccessToken)}]}, - [{autoredirect, false}], [], ?MODULE - ), - jsx:decode(list_to_binary(UserInfoJson), [return_maps]). - -sleep(AccessToken) -> - {ok,{{"HTTP/1.1",200,"OK"}, _, SleepInfoJson}} = httpc:request( - get, {?API_URL_PREFIX ++ "/v1/sleep", [{"Authorization", "Bearer " ++ binary_to_list(AccessToken)}]}, - [{autoredirect, false}], [], ?MODULE - ), - jsx:decode(list_to_binary(SleepInfoJson), [return_maps]). 
- -activity(AccessToken) -> - {ok,{{"HTTP/1.1",200,"OK"}, _, ActivityInfoJson}} = httpc:request( - get, {?API_URL_PREFIX ++ "/v1/activity", [{"Authorization", "Bearer " ++ binary_to_list(AccessToken)}]}, - [{autoredirect, false}], [], ?MODULE - ), - jsx:decode(list_to_binary(ActivityInfoJson), [return_maps]). diff --git a/test.escript b/test.escript deleted file mode 100644 index 79f8e9ec..00000000 --- a/test.escript +++ /dev/null @@ -1,28 +0,0 @@ -ng -*- -%%! -smp enable -pa _build/default/lib/oranif/ebin/ - --define(TNS, << - "(DESCRIPTION=(ADDRESS_LIST=(ADDRESS=" - "(PROTOCOL=TCP)(HOST=192.1681.160)(PORT=1521)))" - "(CONNECT_DATA=(SERVER=dedicated)(SERVICE_NAME=orclpdb1)))" - >>). --define(USER, <<"scott">>). --define(PSWD, <<"regit">>). - --define(TEST_SQL, <<"select 1 from dual">>). - - -main(_) -> - ok = dpi:load_unsafe(), - Ctx = dpi:context_create(3, 0), - Conn = dpi:conn_create(Ctx, ?USER, ?PSWD, ?TNS, #{}, #{}), - Stmt = dpi:conn_prepareStmt(Conn, false, ?TEST_SQL, <<>>), - 1 = dpi:stmt_execute(Stmt, []), - #{found := true} = dpi:stmt_fetch(Stmt), - #{data := Result} = - dpi:stmt_getQueryValue(Stmt, 1), - 1.0 = dpi:data_get(Result), - io:format("done ~n", []), - halt(1). - - From c690b25e5b88f54e36f4f168d52945841130f559 Mon Sep 17 00:00:00 2001 From: shamis Date: Wed, 29 Apr 2020 10:24:06 +0200 Subject: [PATCH 12/72] updated meck to 0.8.13 version --- rebar.config | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/rebar.config b/rebar.config index 185a03f2..6e0e841e 100644 --- a/rebar.config +++ b/rebar.config @@ -111,8 +111,5 @@ {post_hooks, [{release, "bash ./post_release.sh"}]}, {relx, [{dev_mode, false}, {include_src, false}]} ]}, - {test, [ - {deps, [meck]} -% {dist_node, [{setcookie, 'testcookie'}, {sname, 'testnode'}]} - ]} + {test, [{deps, [{meck, "0.8.13"}]}]} ]}. 
From c7233700708f3ee65e54ff238394ca228b94a9a8 Mon Sep 17 00:00:00 2001 From: shamis Date: Wed, 29 Apr 2020 13:37:46 +0200 Subject: [PATCH 13/72] fetching only latest sync day data --- src/dperl/jobs/dperl_ouraring_crawl.erl | 40 +++++++++++++++++++------ 1 file changed, 31 insertions(+), 9 deletions(-) diff --git a/src/dperl/jobs/dperl_ouraring_crawl.erl b/src/dperl/jobs/dperl_ouraring_crawl.erl index 3c192924..8a7ad96c 100644 --- a/src/dperl/jobs/dperl_ouraring_crawl.erl +++ b/src/dperl/jobs/dperl_ouraring_crawl.erl @@ -204,32 +204,49 @@ fetch_metrics([Type | Types], State) -> case fetch_metric(Type, Day, State) of {error, Error} -> {error, Error}; - State1 -> + none -> + fetch_metrics(Types, State); + {ok, MDay, Metric} -> + State1 = set_metric_day(Type, MDay, State#state{infos = [Metric | State#state.infos]}), fetch_metrics(Types, State1) end end. fetch_metric(Type, Day, #state{api_url = ApiUrl, access_token = AccessToken} = State) -> ?JInfo("Fetching metric for ~s on ~p", [Type, Day]), - Url = ApiUrl ++ "/v1/" ++ Type ++ day_query(Day), - TypeBin = list_to_binary(Type), - case exec_req(Url, AccessToken) of - #{TypeBin := []} -> - NextDay = next_day(Day), + NextDay = next_day(Day), + case fetch_metric(Type, day_query(Day), ApiUrl, AccessToken) of + none -> case NextDay =< edate:yesterday() of true -> fetch_metric(Type, NextDay, State); false -> State end; - Metric when is_map(Metric) -> - Info = {["ouraring", Type], Metric#{<<"_day">> => list_to_binary(edate:date_to_string(Day))}}, - set_metric_day(Type, Day, State#state{infos = [Info | State#state.infos]}); + {ok, Metric} -> + case fetch_metric(Type, start_day_query(NextDay), ApiUrl, AccessToken) of + {ok, _} -> + {ok, Day, {["ouraring", Type], Metric#{<<"_day">> => list_to_binary(edate:date_to_string(Day))}}}; + Other -> + Other + end; {error, Error} -> ?JError("Error fetching ~s for ~p : ~p", [Type, Day, Error]), {error, Error} end. 
+fetch_metric(Type, DayQuery, ApiUrl, AccessToken) -> + Url = ApiUrl ++ "/v1/" ++ Type ++ DayQuery, + TypeBin = list_to_binary(Type), + case exec_req(Url, AccessToken) of + #{TypeBin := []} -> + none; + Metric when is_map(Metric) -> + {ok, Metric}; + {error, Error} -> + {error, Error} + end. + fetch_userinfo(#state{api_url = ApiUrl, access_token = AccessToken} = State) -> case exec_req(ApiUrl ++ "/v1/userinfo", AccessToken) of UserInfo when is_map(UserInfo) -> @@ -283,6 +300,11 @@ day_query(Day) when is_tuple(Day) -> day_query(Day) when is_list(Day) -> "?start=" ++ Day ++ "&end=" ++ Day. +start_day_query(Day) when is_tuple(Day) -> + start_day_query(edate:date_to_string(Day)); +start_day_query(Day) when is_list(Day) -> + "?start=" ++ Day. + get_last_day("sleep", #state{last_sleep_day = LastSleepDay}) -> LastSleepDay; get_last_day("activity", #state{last_activity_day = LastActivityDay}) -> LastActivityDay; get_last_day("readiness", #state{last_readiness_day = LastReadinessDay}) -> LastReadinessDay. From 322af6b27e22e2050e69c9337f5405e212a448fc Mon Sep 17 00:00:00 2001 From: shamis Date: Thu, 30 Apr 2020 09:06:08 +0200 Subject: [PATCH 14/72] Review commens #4 --- rebar.config | 7 +++---- src/dperl/jobs/dperl_ouraring_crawl.erl | 11 ----------- 2 files changed, 3 insertions(+), 15 deletions(-) diff --git a/rebar.config b/rebar.config index 76dfaf0a..5afd17ad 100644 --- a/rebar.config +++ b/rebar.config @@ -43,13 +43,12 @@ {esaml, {git, "https://github.com/KonnexionsGmbH/esaml", {tag, "2.3.0"}}}, {imem, {git, "https://github.com/konnexionsgmbh/imem", {tag, "3.9.1"}}}, {oranif, {git, "https://github.com/konnexionsgmbh/oranif", {tag, "0.2.3"}}}, - {edate, {git, "https://github.com/dweldon/edate", {branch, "master"}}}, - {prometheus, "4.5.0"} + {edate, {git, "https://github.com/dweldon/edate", {branch, "master"}}} ]}. {erl_first_files, [ - "src/dperl_worker.erl", - "src/dperl_strategy_scr.erl" + "src/dperl/dperl_worker.erl", + "src/dperl/dperl_strategy_scr.erl" ]}. 
{deps_error_on_conflict, false}. diff --git a/src/dperl/jobs/dperl_ouraring_crawl.erl b/src/dperl/jobs/dperl_ouraring_crawl.erl index 8a7ad96c..f5488aae 100644 --- a/src/dperl/jobs/dperl_ouraring_crawl.erl +++ b/src/dperl/jobs/dperl_ouraring_crawl.erl @@ -313,14 +313,3 @@ set_metric_day("sleep", Day, State) -> State#state{last_sleep_day = Day}; set_metric_day("activity", Day, State) -> State#state{last_activity_day = Day}; set_metric_day("readiness", Day, State) -> State#state{last_readiness_day = Day}. -% format_links(Links) -> -% lists:map( -% fun(#{url := Url} = Link) -> -% NewUrl = -% case lists:last(Url) of -% $/ -> Url; -% _ -> Url ++ "/" -% end, -% Link#{url := NewUrl}; -% (Link) -> Link -% end, Links). From 625c26644230c4f4f10e85d0d7e3b867be0c1f72 Mon Sep 17 00:00:00 2001 From: shamis Date: Thu, 30 Apr 2020 10:49:52 +0200 Subject: [PATCH 15/72] fetching latest sleep and readiness data --- rebar.config | 5 ++--- src/dperl/jobs/dperl_ouraring_crawl.erl | 23 +++++++++++++++-------- 2 files changed, 17 insertions(+), 11 deletions(-) diff --git a/rebar.config b/rebar.config index 5afd17ad..d99e64f1 100644 --- a/rebar.config +++ b/rebar.config @@ -41,7 +41,7 @@ {prometheus, "4.5.0"}, {erlimem, {git, "https://github.com/konnexionsgmbh/erlimem", {tag, "3.1.0"}}}, {esaml, {git, "https://github.com/KonnexionsGmbH/esaml", {tag, "2.3.0"}}}, - {imem, {git, "https://github.com/konnexionsgmbh/imem", {tag, "3.9.1"}}}, + {imem, {git, "https://github.com/konnexionsgmbh/imem", {branch, "master"}}}, {oranif, {git, "https://github.com/konnexionsgmbh/oranif", {tag, "0.2.3"}}}, {edate, {git, "https://github.com/dweldon/edate", {branch, "master"}}} ]}. @@ -64,8 +64,7 @@ {lager_extra_sinks, [access]} ]}. -{minimum_otp_vsn, "20.1"}. -{blacklisted_otp_vsns, ["20.3"]}. +{minimum_otp_vsn, "21.0"}. 
{eunit_opts, [ {skip_deps, true}, diff --git a/src/dperl/jobs/dperl_ouraring_crawl.erl b/src/dperl/jobs/dperl_ouraring_crawl.erl index f5488aae..530a7bba 100644 --- a/src/dperl/jobs/dperl_ouraring_crawl.erl +++ b/src/dperl/jobs/dperl_ouraring_crawl.erl @@ -217,18 +217,25 @@ fetch_metric(Type, Day, #state{api_url = ApiUrl, access_token = AccessToken} = S NextDay = next_day(Day), case fetch_metric(Type, day_query(Day), ApiUrl, AccessToken) of none -> - case NextDay =< edate:yesterday() of - true -> + case fetch_metric(Type, start_day_query(NextDay), ApiUrl, AccessToken) of + {ok, _} -> fetch_metric(Type, NextDay, State); - false -> + _Other -> State end; {ok, Metric} -> - case fetch_metric(Type, start_day_query(NextDay), ApiUrl, AccessToken) of - {ok, _} -> - {ok, Day, {["ouraring", Type], Metric#{<<"_day">> => list_to_binary(edate:date_to_string(Day))}}}; - Other -> - Other + Info = {["ouraring", Type], Metric#{<<"_day">> => list_to_binary(edate:date_to_string(Day))}}, + case Type of + Type when Type == "sleep"; Type == "readiness" -> + {ok, Day, Info}; + "activity" -> + % fetching activity only if next days data exists + case fetch_metric(Type, start_day_query(NextDay), ApiUrl, AccessToken) of + {ok, _} -> + {ok, Day, Info}; + Other -> + Other + end end; {error, Error} -> ?JError("Error fetching ~s for ~p : ~p", [Type, Day, Error]), From 69bc0c2c3b5f59724b229fc3495e53d81a7d09e0 Mon Sep 17 00:00:00 2001 From: shamis Date: Thu, 30 Apr 2020 15:25:06 +0200 Subject: [PATCH 16/72] added avatar id as a property --- src/dperl/jobs/dperl_ouraring_crawl.erl | 18 ++++++++++++------ 1 file changed, 12 insertions(+), 6 deletions(-) diff --git a/src/dperl/jobs/dperl_ouraring_crawl.erl b/src/dperl/jobs/dperl_ouraring_crawl.erl index 530a7bba..1ca10f53 100644 --- a/src/dperl/jobs/dperl_ouraring_crawl.erl +++ b/src/dperl/jobs/dperl_ouraring_crawl.erl @@ -17,7 +17,7 @@ -record(state, {name, channel, client_id, client_secret, password, email, cb_uri, is_connected = false, 
access_token, api_url, oauth_url, last_sleep_day, last_activity_day, last_readiness_day, - infos = [], auth_time, auth_expiry}). + infos = [], auth_time, auth_expiry, avatar_id}). % dperl_strategy_scr export -export([connect_check_src/1, get_source_events/2, connect_check_dst/1, @@ -158,7 +158,7 @@ init({#dperlJob{name=Name, dstArgs = #{channel := Channel}, srcArgs = #{client_id := ClientId, user_password := Password, client_secret := ClientSecret, user_email := Email, cb_uri := CallbackUri, api_url := ApiUrl, - oauth_url := OauthUrl}}, State}) -> + oauth_url := OauthUrl, avatar_id := AvatarId}}, State}) -> ?JInfo("Starting ..."), ChannelBin = dperl_dal:to_binary(Channel), dperl_dal:create_check_channel(ChannelBin), @@ -167,7 +167,8 @@ init({#dperlJob{name=Name, dstArgs = #{channel := Channel}, {ok, State#state{channel = ChannelBin, client_id = ClientId, client_secret = ClientSecret, password = Password, email = Email, cb_uri = CallbackUri, name = Name, - api_url = ApiUrl, oauth_url = OauthUrl}}; + api_url = ApiUrl, oauth_url = OauthUrl, + avatar_id = AvatarId}}; init(Args) -> ?JError("bad start parameters ~p", [Args]), {stop, badarg}. @@ -188,6 +189,8 @@ terminate(Reason, _State) -> httpc:reset_cookies(?MODULE), ?JInfo("terminate ~p", [Reason]). 
+%% private functions + fetch_metrics([], State) -> {ok, State}; fetch_metrics(["userinfo" | Types], State) -> case fetch_userinfo(State) of @@ -224,7 +227,8 @@ fetch_metric(Type, Day, #state{api_url = ApiUrl, access_token = AccessToken} = S State end; {ok, Metric} -> - Info = {["ouraring", Type], Metric#{<<"_day">> => list_to_binary(edate:date_to_string(Day))}}, + Key = build_key(Type, State#state.avatar_id), + Info = {Key, Metric#{<<"_day">> => list_to_binary(edate:date_to_string(Day))}}, case Type of Type when Type == "sleep"; Type == "readiness" -> {ok, Day, Info}; @@ -257,7 +261,7 @@ fetch_metric(Type, DayQuery, ApiUrl, AccessToken) -> fetch_userinfo(#state{api_url = ApiUrl, access_token = AccessToken} = State) -> case exec_req(ApiUrl ++ "/v1/userinfo", AccessToken) of UserInfo when is_map(UserInfo) -> - Info = {["ouraring", "userinfo"], UserInfo}, + Info = {build_key("userinfo", State#state.avatar_id), UserInfo}, State#state{infos = [Info | State#state.infos]}; {error, Error} -> ?JError("Error fetching userinfo : ~p", [Error]), @@ -266,7 +270,7 @@ fetch_userinfo(#state{api_url = ApiUrl, access_token = AccessToken} = State) -> get_day(Type, State) -> LastDay = get_last_day(Type, State), - Key = ["ouraring", Type], + Key = build_key(Type, State#state.avatar_id), Yesterday = edate:yesterday(), case dperl_dal:read_channel(State#state.channel, Key) of ?NOT_FOUND -> @@ -320,3 +324,5 @@ set_metric_day("sleep", Day, State) -> State#state{last_sleep_day = Day}; set_metric_day("activity", Day, State) -> State#state{last_activity_day = Day}; set_metric_day("readiness", Day, State) -> State#state{last_readiness_day = Day}. +build_key(Type, AvatarId) when is_list(Type), is_list(AvatarId)-> + [AvatarId, "ouraring", Type]. 
From 22ad119eeb7cc68b1d689cb6f1803e3e39804f87 Mon Sep 17 00:00:00 2001 From: shamis Date: Thu, 30 Apr 2020 18:28:07 +0200 Subject: [PATCH 17/72] code refactoring --- src/dperl/jobs/dperl_ouraring_crawl.erl | 116 +++++++++++++----------- 1 file changed, 65 insertions(+), 51 deletions(-) diff --git a/src/dperl/jobs/dperl_ouraring_crawl.erl b/src/dperl/jobs/dperl_ouraring_crawl.erl index 1ca10f53..a9d30786 100644 --- a/src/dperl/jobs/dperl_ouraring_crawl.erl +++ b/src/dperl/jobs/dperl_ouraring_crawl.erl @@ -36,58 +36,65 @@ connect_check_src(#state{is_connected = true, auth_expiry = ExpiresIn, auth_time connect_check_src(#state{is_connected = false, client_id = ClientId, cb_uri = CallbackUri, client_secret = ClientSecret, password = Password, email = Email, oauth_url = OauthUrl} = State) -> + ?Info("Generating new access token"), httpc:reset_cookies(?MODULE), - Url = OauthUrl ++ "/oauth/authorize" - ++ "?response_type=code" - ++ "&client_id=" ++ ClientId - ++ "&redirect_uri=" ++ edoc_lib:escape_uri(CallbackUri) - ++ "&scope=email+personal+daily" - ++ "&state=" ++ "test", - case httpc:request(get, {Url, []}, [{autoredirect, false}], [], ?MODULE) of - {ok, {{"HTTP/1.1",302,"Found"}, RespHeader302, []}} -> - RedirectUri = OauthUrl ++ proplists:get_value("location", RespHeader302), - {ok, {{"HTTP/1.1",200,"OK"}, RespHeader, _Body}} = httpc:request(get, {RedirectUri, []}, [{autoredirect, false}], [], ?MODULE), - SetCookieHeader = proplists:get_value("set-cookie", RespHeader), - {match, [XRefCookie]} = re:run(SetCookieHeader, ".*_xsrf=(.*);.*", [{capture, [1], list}]), - {ok,{{"HTTP/1.1",302,"Found"}, RespHeader302_1, []}} = httpc:request( - post, { - RedirectUri, [], "application/x-www-form-urlencoded", - "_xsrf="++edoc_lib:escape_uri(XRefCookie) - ++ "&email=" ++ edoc_lib:escape_uri(Email) - ++ "&password=" ++ edoc_lib:escape_uri(Password) - }, [{autoredirect, false}], [], ?MODULE - ), - RedirectUri_1 = OauthUrl ++ proplists:get_value("location", RespHeader302_1), - {ok, 
{{"HTTP/1.1",200,"OK"}, _, _}} = httpc:request(get, {RedirectUri_1, []}, [{autoredirect, false}], [], ?MODULE), - {ok, {{"HTTP/1.1",302,"Found"}, RespHeader302_2, []}} = httpc:request( - post, { - RedirectUri_1, [], "application/x-www-form-urlencoded", - "_xsrf="++edoc_lib:escape_uri(XRefCookie) - ++ "&scope_email=on" - ++ "&scope_personal=on" - ++ "&scope_daily=on" - ++ "&allow=Accept" - }, [{autoredirect, false}], [], ?MODULE - ), - RedirectUri_2 = proplists:get_value("location", RespHeader302_2), - #{query := QueryString} = uri_string:parse(RedirectUri_2), - #{"code" := Code} = maps:from_list(uri_string:dissect_query(QueryString)), - {ok, {{"HTTP/1.1",200,"OK"}, _, BodyJson}} = httpc:request( - post, { - OauthUrl ++ "/oauth/token", [], "application/x-www-form-urlencoded", - "grant_type=authorization_code" - ++ "&code=" ++ Code - ++ "&redirect_uri=" ++ edoc_lib:escape_uri(CallbackUri) - ++ "&client_id=" ++ ClientId - ++ "&client_secret=" ++ ClientSecret - }, [{autoredirect, false}], [], ?MODULE - ), - #{<<"access_token">> := AccessToken, <<"expires_in">> := ExpiresIn} = Auth = jsx:decode(list_to_binary(BodyJson), [return_maps]), - ?JInfo("Authentication successful : ~p", [Auth]), - {ok, State#state{is_connected = true, access_token = AccessToken, - auth_expiry = ExpiresIn, auth_time = imem_meta:time()}}; - Error -> - ?JError("Unexpected response : ~p", [Error]), + Params = #{ + "response_type" => "code", + "client_id" => ClientId, + "redirect_uri" => edoc_lib:escape_uri(CallbackUri), + "scope" => "email+personal+daily", + "state" => "test" + }, + Url = OauthUrl ++ "/oauth/authorize?" 
++ binary_to_list(url_enc_params(Params)), + try + {ok, {{"HTTP/1.1",302,"Found"}, RespHeader302, []}} = httpc:request( + get, {Url, []}, [{autoredirect, false}], [], ?MODULE), + RedirectUri = OauthUrl ++ proplists:get_value("location", RespHeader302), + {ok, {{"HTTP/1.1",200,"OK"}, RespHeader, _Body}} = httpc:request(get, {RedirectUri, []}, [{autoredirect, false}], [], ?MODULE), + SetCookieHeader = proplists:get_value("set-cookie", RespHeader), + {match, [XRefCookie]} = re:run(SetCookieHeader, ".*_xsrf=(.*);.*", [{capture, [1], list}]), + Params2 = #{ + "_xsrf" => edoc_lib:escape_uri(XRefCookie), + "email" => edoc_lib:escape_uri(Email), + "password" => edoc_lib:escape_uri(Password) + }, + {ok,{{"HTTP/1.1",302,"Found"}, RespHeader302_1, []}} = httpc:request( + post, { + RedirectUri, [], "application/x-www-form-urlencoded", + url_enc_params(Params2)}, [{autoredirect, false}], [], ?MODULE), + RedirectUri_1 = OauthUrl ++ proplists:get_value("location", RespHeader302_1), + {ok, {{"HTTP/1.1",200,"OK"}, _, _}} = httpc:request(get, {RedirectUri_1, []}, [{autoredirect, false}], [], ?MODULE), + Params3 = #{ + "_xsrf" => edoc_lib:escape_uri(XRefCookie), + "cope_email" => "on", + "scope_personal" => "on", + "scope_daily" => "on", + "allow" => "Accept" + }, + {ok, {{"HTTP/1.1",302,"Found"}, RespHeader302_2, []}} = httpc:request( + post, { + RedirectUri_1, [], "application/x-www-form-urlencoded", + url_enc_params(Params3)}, [{autoredirect, false}], [], ?MODULE), + RedirectUri_2 = proplists:get_value("location", RespHeader302_2), + #{query := QueryString} = uri_string:parse(RedirectUri_2), + #{"code" := Code} = maps:from_list(uri_string:dissect_query(QueryString)), + Params4 = #{ + "grant_type" => "authorization_code", + "code" => Code, "client_id" => ClientId, + "redirect_uri" => edoc_lib:escape_uri(CallbackUri), + "client_secret" => ClientSecret + }, + {ok, {{"HTTP/1.1",200,"OK"}, _, BodyJson}} = httpc:request( + post, { + OauthUrl ++ "/oauth/token", [], 
"application/x-www-form-urlencoded", + url_enc_params(Params4)}, [{autoredirect, false}], [], ?MODULE), + #{<<"access_token">> := AccessToken, <<"expires_in">> := ExpiresIn} = Auth = jsx:decode(list_to_binary(BodyJson), [return_maps]), + ?JInfo("Authentication successful : ~p", [Auth]), + {ok, State#state{is_connected = true, access_token = AccessToken, + auth_expiry = ExpiresIn, auth_time = imem_meta:time()}} + catch + Class:Error:Stacktrace -> + ?JError("Unexpected response : ~p:~p:~p", [Class, Error, Stacktrace]), {error, invalid_return, State} end; connect_check_src(State) -> {ok, State}. @@ -326,3 +333,10 @@ set_metric_day("readiness", Day, State) -> State#state{last_readiness_day = Day} build_key(Type, AvatarId) when is_list(Type), is_list(AvatarId)-> [AvatarId, "ouraring", Type]. + +url_enc_params(Params) -> + EParams = maps:fold( + fun(K, V, Acc) -> + ["&", K, "=", V | Acc] + end, [], Params), + erlang:iolist_to_binary(tl(EParams)). \ No newline at end of file From 26820c6cc2eaf0a808ebd75c514c4e0e6721b276 Mon Sep 17 00:00:00 2001 From: shamis Date: Fri, 1 May 2020 09:12:17 +0200 Subject: [PATCH 18/72] added new line at the end --- src/dperl/jobs/dperl_ouraring_crawl.erl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/dperl/jobs/dperl_ouraring_crawl.erl b/src/dperl/jobs/dperl_ouraring_crawl.erl index a9d30786..42c9fff6 100644 --- a/src/dperl/jobs/dperl_ouraring_crawl.erl +++ b/src/dperl/jobs/dperl_ouraring_crawl.erl @@ -339,4 +339,4 @@ url_enc_params(Params) -> fun(K, V, Acc) -> ["&", K, "=", V | Acc] end, [], Params), - erlang:iolist_to_binary(tl(EParams)). \ No newline at end of file + erlang:iolist_to_binary(tl(EParams)). 
From a0443d1142990e3b5782f9cb521b53bd19982ab3 Mon Sep 17 00:00:00 2001 From: shamis Date: Fri, 1 May 2020 14:18:55 +0200 Subject: [PATCH 19/72] default key_prefix added --- src/dperl/jobs/dperl_ouraring_crawl.erl | 17 +++++++++-------- 1 file changed, 9 insertions(+), 8 deletions(-) diff --git a/src/dperl/jobs/dperl_ouraring_crawl.erl b/src/dperl/jobs/dperl_ouraring_crawl.erl index 42c9fff6..9bc2eb63 100644 --- a/src/dperl/jobs/dperl_ouraring_crawl.erl +++ b/src/dperl/jobs/dperl_ouraring_crawl.erl @@ -17,7 +17,7 @@ -record(state, {name, channel, client_id, client_secret, password, email, cb_uri, is_connected = false, access_token, api_url, oauth_url, last_sleep_day, last_activity_day, last_readiness_day, - infos = [], auth_time, auth_expiry, avatar_id}). + infos = [], auth_time, auth_expiry, key_prefix}). % dperl_strategy_scr export -export([connect_check_src/1, get_source_events/2, connect_check_dst/1, @@ -165,9 +165,10 @@ init({#dperlJob{name=Name, dstArgs = #{channel := Channel}, srcArgs = #{client_id := ClientId, user_password := Password, client_secret := ClientSecret, user_email := Email, cb_uri := CallbackUri, api_url := ApiUrl, - oauth_url := OauthUrl, avatar_id := AvatarId}}, State}) -> + oauth_url := OauthUrl} = SrcArgs}, State}) -> ?JInfo("Starting ..."), ChannelBin = dperl_dal:to_binary(Channel), + KeyPrefix = maps:get(key_prefix, SrcArgs, []), dperl_dal:create_check_channel(ChannelBin), inets:start(httpc, [{profile, ?MODULE}]), ok = httpc:set_options([{cookies, enabled}], ?MODULE), @@ -175,7 +176,7 @@ init({#dperlJob{name=Name, dstArgs = #{channel := Channel}, client_secret = ClientSecret, password = Password, email = Email, cb_uri = CallbackUri, name = Name, api_url = ApiUrl, oauth_url = OauthUrl, - avatar_id = AvatarId}}; + key_prefix = KeyPrefix}}; init(Args) -> ?JError("bad start parameters ~p", [Args]), {stop, badarg}. 
@@ -234,7 +235,7 @@ fetch_metric(Type, Day, #state{api_url = ApiUrl, access_token = AccessToken} = S State end; {ok, Metric} -> - Key = build_key(Type, State#state.avatar_id), + Key = build_key(Type, State#state.key_prefix), Info = {Key, Metric#{<<"_day">> => list_to_binary(edate:date_to_string(Day))}}, case Type of Type when Type == "sleep"; Type == "readiness" -> @@ -268,7 +269,7 @@ fetch_metric(Type, DayQuery, ApiUrl, AccessToken) -> fetch_userinfo(#state{api_url = ApiUrl, access_token = AccessToken} = State) -> case exec_req(ApiUrl ++ "/v1/userinfo", AccessToken) of UserInfo when is_map(UserInfo) -> - Info = {build_key("userinfo", State#state.avatar_id), UserInfo}, + Info = {build_key("userinfo", State#state.key_prefix), UserInfo}, State#state{infos = [Info | State#state.infos]}; {error, Error} -> ?JError("Error fetching userinfo : ~p", [Error]), @@ -277,7 +278,7 @@ fetch_userinfo(#state{api_url = ApiUrl, access_token = AccessToken} = State) -> get_day(Type, State) -> LastDay = get_last_day(Type, State), - Key = build_key(Type, State#state.avatar_id), + Key = build_key(Type, State#state.key_prefix), Yesterday = edate:yesterday(), case dperl_dal:read_channel(State#state.channel, Key) of ?NOT_FOUND -> @@ -331,8 +332,8 @@ set_metric_day("sleep", Day, State) -> State#state{last_sleep_day = Day}; set_metric_day("activity", Day, State) -> State#state{last_activity_day = Day}; set_metric_day("readiness", Day, State) -> State#state{last_readiness_day = Day}. -build_key(Type, AvatarId) when is_list(Type), is_list(AvatarId)-> - [AvatarId, "ouraring", Type]. +build_key(Type, KeyPrefix) when is_list(Type), is_list(KeyPrefix)-> + KeyPrefix ++ [Type]. 
url_enc_params(Params) -> EParams = maps:fold( From 257726a308003d47cd28a72e29598c6db6aac44a Mon Sep 17 00:00:00 2001 From: shamis Date: Fri, 8 May 2020 13:26:43 +0200 Subject: [PATCH 20/72] using encryption imem and fetching enc hash to the job --- rebar.config | 2 +- src/dperl/dperl_auth_cache.erl | 50 +++++++++++++++++++++++++ src/dperl/dperl_sup.erl | 6 ++- src/dperl/jobs/dperl_ouraring_crawl.erl | 29 ++++++++------ 4 files changed, 74 insertions(+), 13 deletions(-) create mode 100644 src/dperl/dperl_auth_cache.erl diff --git a/rebar.config b/rebar.config index d99e64f1..89ac4f99 100644 --- a/rebar.config +++ b/rebar.config @@ -41,7 +41,7 @@ {prometheus, "4.5.0"}, {erlimem, {git, "https://github.com/konnexionsgmbh/erlimem", {tag, "3.1.0"}}}, {esaml, {git, "https://github.com/KonnexionsGmbH/esaml", {tag, "2.3.0"}}}, - {imem, {git, "https://github.com/konnexionsgmbh/imem", {branch, "master"}}}, + {imem, {git, "https://github.com/konnexionsgmbh/imem", {branch, "encryption2"}}}, {oranif, {git, "https://github.com/konnexionsgmbh/oranif", {tag, "0.2.3"}}}, {edate, {git, "https://github.com/dweldon/edate", {branch, "master"}}} ]}. diff --git a/src/dperl/dperl_auth_cache.erl b/src/dperl/dperl_auth_cache.erl new file mode 100644 index 00000000..221d5647 --- /dev/null +++ b/src/dperl/dperl_auth_cache.erl @@ -0,0 +1,50 @@ +-module(dperl_auth_cache). + +-behaviour(gen_server). + +-export([start_link/0, + set_enc_hash/3, + get_enc_hash/1, + set_enc_hash_locally/3]). + +%% gen_server callbacks +-export([init/1, + handle_call/3, + handle_cast/2, + terminate/2]). + +-safe([set_enc_hash/3]). + +start_link() -> + gen_server:start_link({local, ?MODULE}, ?MODULE, [], []). + +init([]) -> + {ok, #{}}. 
+ +handle_call({setEncHash, JobOrServiceName, User, EncHash}, _From, State) -> + {reply, ok, State#{JobOrServiceName => {User, EncHash}}}; +handle_call({getEncHash, JobOrServiceName}, _From, State) -> + case State of + #{JobOrServiceName := {User, EncHash}} -> + {reply, {User, EncHash}, State}; + _ -> + {reply, undefined, State} + end; +handle_call(_Request, _From, State) -> + {reply, ignored, State}. + +handle_cast(_Msg, State) -> + {noreply, State}. + +terminate(_Reason, _State) -> + ok. + +set_enc_hash(JobOrServiceName, User, EncHash) -> + DataNodes = [N || {_, N} <- imem_meta:data_nodes()], + rpc:multicall(DataNodes, ?MODULE, set_enc_hash_locally, [JobOrServiceName, User, EncHash]). + +set_enc_hash_locally(JobOrServiceName, User, EncHash) -> + gen_server:call(?MODULE, {setEncHash, JobOrServiceName, User, EncHash}). + +get_enc_hash(JobOrServiceName) -> + gen_server:call(?MODULE, {getEncHash, JobOrServiceName}). diff --git a/src/dperl/dperl_sup.erl b/src/dperl/dperl_sup.erl index e0b95398..04f4d758 100644 --- a/src/dperl/dperl_sup.erl +++ b/src/dperl/dperl_sup.erl @@ -83,7 +83,11 @@ init([]) -> #{id => dperl_service_cp, start => {dperl_cp, start_link, [service]}, restart => permanent, shutdown => 5000, type => worker, - modules => [dperl_cp]} + modules => [dperl_cp]}, + #{id => dperl_auth_cache, + start => {dperl_auth_cache, start_link, []}, + restart => permanent, shutdown => 5000, type => worker, + modules => [dperl_auth_cache]} ]}}. 
priv_dir() -> diff --git a/src/dperl/jobs/dperl_ouraring_crawl.erl b/src/dperl/jobs/dperl_ouraring_crawl.erl index 9bc2eb63..c4003803 100644 --- a/src/dperl/jobs/dperl_ouraring_crawl.erl +++ b/src/dperl/jobs/dperl_ouraring_crawl.erl @@ -166,17 +166,24 @@ init({#dperlJob{name=Name, dstArgs = #{channel := Channel}, client_secret := ClientSecret, user_email := Email, cb_uri := CallbackUri, api_url := ApiUrl, oauth_url := OauthUrl} = SrcArgs}, State}) -> - ?JInfo("Starting ..."), - ChannelBin = dperl_dal:to_binary(Channel), - KeyPrefix = maps:get(key_prefix, SrcArgs, []), - dperl_dal:create_check_channel(ChannelBin), - inets:start(httpc, [{profile, ?MODULE}]), - ok = httpc:set_options([{cookies, enabled}], ?MODULE), - {ok, State#state{channel = ChannelBin, client_id = ClientId, - client_secret = ClientSecret, password = Password, - email = Email, cb_uri = CallbackUri, name = Name, - api_url = ApiUrl, oauth_url = OauthUrl, - key_prefix = KeyPrefix}}; + case dperl_auth_cache:get_enc_hash(Name) of + undefined -> + ?JError("Encryption hash is not avaialable"), + {stop, badarg}; + {User, EncHash} -> + ?JInfo("Starting with ~p's enchash...", [User]), + put(pwdHash, EncHash), + ChannelBin = dperl_dal:to_binary(Channel), + KeyPrefix = maps:get(key_prefix, SrcArgs, []), + dperl_dal:create_check_channel(ChannelBin), + inets:start(httpc, [{profile, ?MODULE}]), + ok = httpc:set_options([{cookies, enabled}], ?MODULE), + {ok, State#state{channel = ChannelBin, client_id = ClientId, + client_secret = ClientSecret, password = Password, + email = Email, cb_uri = CallbackUri, name = Name, + api_url = ApiUrl, oauth_url = OauthUrl, + key_prefix = KeyPrefix}} + end; init(Args) -> ?JError("bad start parameters ~p", [Args]), {stop, badarg}. 
From 73607be3bbe0478a2546692af70027a04e5b98ce Mon Sep 17 00:00:00 2001 From: shamis Date: Fri, 8 May 2020 13:50:43 +0200 Subject: [PATCH 21/72] changed put to sec interface --- src/dperl/jobs/dperl_ouraring_crawl.erl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/dperl/jobs/dperl_ouraring_crawl.erl b/src/dperl/jobs/dperl_ouraring_crawl.erl index c4003803..723b2445 100644 --- a/src/dperl/jobs/dperl_ouraring_crawl.erl +++ b/src/dperl/jobs/dperl_ouraring_crawl.erl @@ -172,7 +172,7 @@ init({#dperlJob{name=Name, dstArgs = #{channel := Channel}, {stop, badarg}; {User, EncHash} -> ?JInfo("Starting with ~p's enchash...", [User]), - put(pwdHash, EncHash), + imem_sec_mnesia:put_enc_hash(EncHash), ChannelBin = dperl_dal:to_binary(Channel), KeyPrefix = maps:get(key_prefix, SrcArgs, []), dperl_dal:create_check_channel(ChannelBin), From 7d62cdaa33a19430de1432c263159200a99f5d93 Mon Sep 17 00:00:00 2001 From: shamis Date: Fri, 8 May 2020 17:09:09 +0200 Subject: [PATCH 22/72] encHash in the fsm process --- rebar.config | 2 +- src/dderl_fsm.erl | 7 +++++++ src/imem_adapter.erl | 2 ++ 3 files changed, 10 insertions(+), 1 deletion(-) diff --git a/rebar.config b/rebar.config index 6e0e841e..206f37d1 100644 --- a/rebar.config +++ b/rebar.config @@ -40,7 +40,7 @@ {cowboy,"2.7.0"}, {erlimem, {git, "https://github.com/konnexionsgmbh/erlimem", {tag, "3.1.0"}}}, {esaml, {git, "https://github.com/KonnexionsGmbH/esaml", {tag, "2.3.0"}}}, - {imem, {git, "https://github.com/konnexionsgmbh/imem", {tag, "3.9.1"}}}, + {imem, {git, "https://github.com/konnexionsgmbh/imem", {branch, "encryption2"}}}, {oranif, {git, "https://github.com/konnexionsgmbh/oranif", {tag, "0.2.3"}}}, {prometheus, "4.5.0"} ]}. diff --git a/src/dderl_fsm.erl b/src/dderl_fsm.erl index 3c2c993e..e123a6d8 100644 --- a/src/dderl_fsm.erl +++ b/src/dderl_fsm.erl @@ -39,6 +39,7 @@ , stop/1 , inspect_status/1 , inspect_state/1 + , put_enc_hash/2 ]). 
-export([ rows/2 %% incoming rows [RowList,true] | [RowList,false] | [RowList,tail] RowList=list(KeyTuples) @@ -232,6 +233,8 @@ stop(Pid) -> inspect_status(Pid) -> gen_statem:call(Pid, inspect_status). +put_enc_hash(Pid, EncHash) -> gen_statem:call(Pid, {put_enc_hash, EncHash}). + inspect_state(Pid) -> gen_statem:call(Pid, inspect_state). @@ -685,6 +688,7 @@ reply_stack(SN,ReplyTo, #state{stack={button,_Button,RT},tRef=TRef}=State0) -> init({#ctx{} = Ctx, SessPid}) -> process_flag(trap_exit, true), true = link(SessPid), + #ctx{ bl = BL , replyToFun = ReplyTo , rowCols = RowCols @@ -1738,6 +1742,9 @@ handle_call(cache_data, From, SN, #state{tableId = TableId, ctx=#ctx{rowCols=Row {next_state, SN, State, [{reply, From, ok}]}; handle_call(inspect_status, From, SN, State) -> {next_state, SN, State, [{reply, From, SN}]}; +handle_call({put_enc_hash, EncHash}, From, SN, State) -> + imem_sec_mnesia:put_enc_hash(EncHash), + {next_state, SN, State, [{reply, From, ok}]}; handle_call(inspect_state, From, SN, State) -> {next_state, SN, State, [{reply, From, State}]}; handle_call(_Event, _From, empty, State) -> diff --git a/src/imem_adapter.erl b/src/imem_adapter.erl index 4b46dc93..8816674e 100644 --- a/src/imem_adapter.erl +++ b/src/imem_adapter.erl @@ -810,6 +810,8 @@ process_query(Query, Connection, Params, SessPid) -> , update_cursor_execute_funs = imem_adapter_funs:update_cursor_execute(Connection, StmtRefs) }, SessPid), erlimem_session:add_stmt_fsm(Connection, StmtRefs, {dderl_fsm, StmtFsm}), + EncHash = erlimem_session:run_cmd(Connection, get_enc_hash, []), + dderl_fsm:put_enc_hash(StmtFsm, EncHash), ?Debug("StmtRslt ~p ~p", [RowCols, SortSpec]), Columns = gen_adapter:build_column_json(lists:reverse(RowCols)), JSortSpec = build_srtspec_json(SortSpec), From 357ec52bc5d3e3f99c069b5bbb651daaab5ae8ce Mon Sep 17 00:00:00 2001 From: shamis Date: Fri, 15 May 2020 11:41:13 +0200 Subject: [PATCH 23/72] people fetch WIP --- priv/dev/static/index.html | 1 + priv/dev/static/index.js 
| 3 ++- priv/dev/static/scripts/login.js | 22 +++++++++++++++++++++ src/dderl.erl | 10 +++++----- src/dderl_session.erl | 34 ++++++++++++++++++++++++++++++++ 5 files changed, 64 insertions(+), 6 deletions(-) diff --git a/priv/dev/static/index.html b/priv/dev/static/index.html index 3dc6f0d2..84e6d582 100644 --- a/priv/dev/static/index.html +++ b/priv/dev/static/index.html @@ -23,6 +23,7 @@
  • About
  • Disconnect
  • Restart Application
  • +
  • Register Office 365
  • New Connection
  • diff --git a/priv/dev/static/index.js b/priv/dev/static/index.js index 9a0a4b1c..909bb28b 100644 --- a/priv/dev/static/index.js +++ b/priv/dev/static/index.js @@ -8,7 +8,7 @@ import {loginAjax} from "./scripts/login"; import {alert_jq} from './dialogs/dialogs'; import {dderlState, show_qry_files, change_password, show_about_dlg} from "./scripts/dderl"; -import {new_connection_tab, logout, restart} from "./scripts/login"; +import {new_connection_tab, logout, restart, authorize_office} from "./scripts/login"; import {disconnect_tab, close_tab} from "./scripts/connect"; import {newSqlEditor} from "./scripts/dderl.sql"; import {patch_jquery_ui} from "./jquery-ui-helper/helper.js"; @@ -93,3 +93,4 @@ addClick('btn-restart', () => { restart(); }); addClick('connect-button', () => { new_connection_tab(); }); addClick('newsql-button', () => { newSqlEditor(); }); addClick('btn-logout', () => { logout(); }); +addClick('btn-office-365', () => { authorize_office(); }); diff --git a/priv/dev/static/scripts/login.js b/priv/dev/static/scripts/login.js index bce79df2..229e24db 100644 --- a/priv/dev/static/scripts/login.js +++ b/priv/dev/static/scripts/login.js @@ -23,6 +23,21 @@ export function loginAjax(data = {}) { window.loginCb = loginCb; +const url = new URL(window.location.href); +const code = url.searchParams.get('code'); +const state = url.searchParams.get('state'); +if(code) { + // alert("code is : " + code + ' state is : ' + state); + dderlState.xsrfToken = state; + const body = { 'office_365_code': { 'code': code, 'state': state } }; + ajaxCall(null, 'office_365_code', body, 'office_365_code', function () { window.close(); }, + function () { alert('error!!!!'); }); +} else { + const error = url.searchParams.get('error'); + const errorDesc = url.searchParams.get('error_description'); + alert_jq('Login error : ' + error + ' : ' + errorDesc); +} + function loginCb(resp) { try { if (window.opener && window.opener.isScreensaver && window.opener.loginCb && 
$.isFunction(window.opener.loginCb)) { @@ -400,3 +415,10 @@ export function change_login_password(loggedInUser, shouldConnect) { else alert_jq("Confirm password missmatch!"); }); } + +export function authorize_office() { + const windowUrl = 'https://login.microsoftonline.com/common/oauth2/v2.0/authorize?client_id=353d2e74-ac01-4f48-8b84-d3017e7f91f4&response_type=code&redirect_uri=https%3A%2F%2Flocalhost:8443%2Fdderl%2F&response_mode=query&scope=openid%20offline_access%20https%3A%2F%2Fgraph.microsoft.com%2Fpeople.read&state=' + encodeURIComponent(dderlState.xsrfToken); + const params = 'scrollbars=no,resizable=no,status=no,location=no,toolbar=no,menubar=no,width=500,height=700,left=100,top=100'; + const windowRef = window.open(windowUrl, 'Office 365 login', params); + console.log(windowRef); +} \ No newline at end of file diff --git a/src/dderl.erl b/src/dderl.erl index c2674b15..c9eef886 100644 --- a/src/dderl.erl +++ b/src/dderl.erl @@ -100,8 +100,8 @@ init(Req, '$path_probe') -> init(Req, State) -> Url = iolist_to_binary(cowboy_req:uri(Req)), Req1 = - case binary:last(Url) of - $/ -> + % case binary:last(Url) of + % $/ -> Priv = priv_dir(), Filename = filename:join([Priv, "public", "dist", "index.html"]), case file:read_file(Filename) of @@ -120,9 +120,9 @@ init(Req, State) -> >>, Req ) - end; - _ -> - cowboy_req:reply(301, #{<<"location">> => <>}, Req) + % end; + % _ -> + % cowboy_req:reply(301, #{<<"location">> => <>}, Req) end, {ok, Req1, State}. 
diff --git a/src/dderl_session.erl b/src/dderl_session.erl index 83566e5c..9f301d29 100644 --- a/src/dderl_session.erl +++ b/src/dderl_session.erl @@ -380,6 +380,40 @@ process_call({[<<"about">>], _ReqData}, _Adapter, From, {SrcIp,_}, State) -> reply(From, [{<<"about">>, Versions}], self()), State; +process_call({[<<"office_365_code">>], ReqData}, _Adapter, From, {SrcIp, _}, State) -> + act_log(From, ?CMD_NOARGS, #{src => SrcIp, cmd => "format_json_to_save", args => ReqData}, State), + #{<<"office_365_code">> := BodyJson} = jsx:decode(ReqData, [return_maps]), + #{<<"code">> := Code, <<"state">> := UrlState} = BodyJson, + ?Info("!!!! office 365 code is : ~p, state : ~p", [Code, UrlState]), + Url = "https://login.microsoftonline.com/common/oauth2/v2.0/token", + Head = "", + ContentType = "application/x-www-form-urlencoded", + ClientSecret = "Ltzu6fr1f2Jeps4~0VI41zzY1GOf~drv_~", + + Body = "client_id=353d2e74-ac01-4f48-8b84-d3017e7f91f4 + &scope=https%3A%2F%2Fgraph.microsoft.com%2Fpeople.read + &code=" ++ binary_to_list(Code) ++ " + &redirect_uri=https%3A%2F%2Flocalhost%3A8443%2Fdderl%2F + &grant_type=authorization_code + &client_secret=" ++ http_uri:encode(ClientSecret), + + case httpc:request(post, {Url, Head, ContentType, Body}, [], []) of + {ok, {_, _, TokenBody}} -> + #{<<"access_token">> := AccessToken, <<"refresh_token">> := RefreshToken} = imem_json:decode(list_to_binary(TokenBody), [return_maps]), + reply(From, #{<<"office_365_code">> => #{<<"status">> => <<"ok">>}}, self()), + ?Info("Fetched access token and refresh token ~p, ~p", [AccessToken, size(RefreshToken)]), + ?Info("Fetching email"), + MailUrl = "https://graph.microsoft.com/v1.0/me/people/?$top=1000&$Select=displayName&$orderby=displayName", + AuthHeader = {"Authorization", "Bearer " ++ binary_to_list(AccessToken)}, + OtherHeader = {"X-PeopleQuery-QuerySources", "Mailbox,Directory"}, + {ok, {_, _, Mails}} = httpc:request(get, {MailUrl, [AuthHeader, OtherHeader]}, [], []), + ?Info("Emails : ~p", 
[imem_json:decode(list_to_binary(Mails), [return_maps])]); + {error, Error} -> + ?Error("Fetching access token : ~p", [Error]), + reply(From, #{<<"office_365_code">> => #{<<"error">> => <<"Fetching token failed, Try again">>}}, self()) + end, + State; + process_call({[<<"connect_info">>], _ReqData}, _Adapter, From, {SrcIp,_}, #state{sess=Sess, user_id=UserId, user = User} = State) -> act_log(From, ?CMD_NOARGS, #{src => SrcIp, cmd => "connect_info"}, State), From 848f595c29bf16ba15db0d47ec7468aaac416400 Mon Sep 17 00:00:00 2001 From: shamis Date: Mon, 18 May 2020 12:02:38 +0200 Subject: [PATCH 24/72] dperl office 365 contacts job --- priv/dev/static/scripts/login.js | 14 +- src/dderl_session.erl | 52 +++++--- src/dperl/jobs/dperl_office_365.erl | 200 ++++++++++++++++++++++++++++ 3 files changed, 238 insertions(+), 28 deletions(-) create mode 100644 src/dperl/jobs/dperl_office_365.erl diff --git a/priv/dev/static/scripts/login.js b/priv/dev/static/scripts/login.js index 229e24db..85b8c898 100644 --- a/priv/dev/static/scripts/login.js +++ b/priv/dev/static/scripts/login.js @@ -23,11 +23,11 @@ export function loginAjax(data = {}) { window.loginCb = loginCb; +// office 365 callback handler const url = new URL(window.location.href); const code = url.searchParams.get('code'); const state = url.searchParams.get('state'); if(code) { - // alert("code is : " + code + ' state is : ' + state); dderlState.xsrfToken = state; const body = { 'office_365_code': { 'code': code, 'state': state } }; ajaxCall(null, 'office_365_code', body, 'office_365_code', function () { window.close(); }, @@ -36,7 +36,9 @@ if(code) { const error = url.searchParams.get('error'); const errorDesc = url.searchParams.get('error_description'); alert_jq('Login error : ' + error + ' : ' + errorDesc); + window.close(); } +// office 365 callback handler end function loginCb(resp) { try { @@ -417,8 +419,8 @@ export function change_login_password(loggedInUser, shouldConnect) { } export function authorize_office() 
{ - const windowUrl = 'https://login.microsoftonline.com/common/oauth2/v2.0/authorize?client_id=353d2e74-ac01-4f48-8b84-d3017e7f91f4&response_type=code&redirect_uri=https%3A%2F%2Flocalhost:8443%2Fdderl%2F&response_mode=query&scope=openid%20offline_access%20https%3A%2F%2Fgraph.microsoft.com%2Fpeople.read&state=' + encodeURIComponent(dderlState.xsrfToken); - const params = 'scrollbars=no,resizable=no,status=no,location=no,toolbar=no,menubar=no,width=500,height=700,left=100,top=100'; - const windowRef = window.open(windowUrl, 'Office 365 login', params); - console.log(windowRef); -} \ No newline at end of file + ajaxCall(null, 'office_365_auth_config', {}, 'office_365_auth_config', function(auth_config) { + const params = 'scrollbars=no,resizable=no,status=no,location=no,toolbar=no,menubar=no,width=500,height=600,left=100,top=100'; + window.open(auth_config.url, 'Office 365 login', params); + }); +} diff --git a/src/dderl_session.erl b/src/dderl_session.erl index 9f301d29..d9af5e3e 100644 --- a/src/dderl_session.erl +++ b/src/dderl_session.erl @@ -380,34 +380,33 @@ process_call({[<<"about">>], _ReqData}, _Adapter, From, {SrcIp,_}, State) -> reply(From, [{<<"about">>, Versions}], self()), State; +process_call({[<<"office_365_auth_config">>], _ReqData}, _Adapter, From, {SrcIp, _}, State) -> + act_log(From, ?CMD_NOARGS, #{src => SrcIp, cmd => "about"}, State), + URLState = http_uri:encode(State#state.xsrf_token), + #{auth_url := Url, client_id := ClientId, redirect_uri := RedirectURI, + scope := Scope} = dperl_office_365:get_office_365_auth_config(), + UrlParams = url_enc_params(#{"client_id" => ClientId, "redirect_uri" => {enc, RedirectURI}, + "scope" => {enc, Scope}, "state" => URLState}), + FinalUrl = erlang:iolist_to_binary([Url, "&", UrlParams]), + reply(From, #{<<"office_365_auth_config">> => #{<<"url">> => FinalUrl}}, self()), + State; + process_call({[<<"office_365_code">>], ReqData}, _Adapter, From, {SrcIp, _}, State) -> act_log(From, ?CMD_NOARGS, #{src => 
SrcIp, cmd => "format_json_to_save", args => ReqData}, State), #{<<"office_365_code">> := BodyJson} = jsx:decode(ReqData, [return_maps]), - #{<<"code">> := Code, <<"state">> := UrlState} = BodyJson, - ?Info("!!!! office 365 code is : ~p, state : ~p", [Code, UrlState]), - Url = "https://login.microsoftonline.com/common/oauth2/v2.0/token", - Head = "", + #{<<"code">> := Code} = BodyJson, + #{token_url := TUrl, client_id := ClientId, redirect_uri := RedirectURI, + client_secret := Secret, grant_type := GrantType, + scope := Scope} = dperl_office_365:get_office_365_auth_config(), + Body = url_enc_params(#{"client_id" => ClientId, "scope" => {enc, Scope}, "code" => Code, + "redirect_uri" => {enc, RedirectURI}, "grant_type" => GrantType, + "client_secret" => {enc, Secret}}), ContentType = "application/x-www-form-urlencoded", - ClientSecret = "Ltzu6fr1f2Jeps4~0VI41zzY1GOf~drv_~", - - Body = "client_id=353d2e74-ac01-4f48-8b84-d3017e7f91f4 - &scope=https%3A%2F%2Fgraph.microsoft.com%2Fpeople.read - &code=" ++ binary_to_list(Code) ++ " - &redirect_uri=https%3A%2F%2Flocalhost%3A8443%2Fdderl%2F - &grant_type=authorization_code - &client_secret=" ++ http_uri:encode(ClientSecret), - - case httpc:request(post, {Url, Head, ContentType, Body}, [], []) of + case httpc:request(post, {TUrl, "", ContentType, Body}, [], []) of {ok, {_, _, TokenBody}} -> - #{<<"access_token">> := AccessToken, <<"refresh_token">> := RefreshToken} = imem_json:decode(list_to_binary(TokenBody), [return_maps]), + TokenInfo = imem_json:decode(list_to_binary(TokenBody), [return_maps]), reply(From, #{<<"office_365_code">> => #{<<"status">> => <<"ok">>}}, self()), - ?Info("Fetched access token and refresh token ~p, ~p", [AccessToken, size(RefreshToken)]), - ?Info("Fetching email"), - MailUrl = "https://graph.microsoft.com/v1.0/me/people/?$top=1000&$Select=displayName&$orderby=displayName", - AuthHeader = {"Authorization", "Bearer " ++ binary_to_list(AccessToken)}, - OtherHeader = {"X-PeopleQuery-QuerySources", 
"Mailbox,Directory"}, - {ok, {_, _, Mails}} = httpc:request(get, {MailUrl, [AuthHeader, OtherHeader]}, [], []), - ?Info("Emails : ~p", [imem_json:decode(list_to_binary(Mails), [return_maps])]); + dperl_office_365:set_token_info(TokenInfo); {error, Error} -> ?Error("Fetching access token : ~p", [Error]), reply(From, #{<<"office_365_code">> => #{<<"error">> => <<"Fetching token failed, Try again">>}}, self()) @@ -911,3 +910,12 @@ act_log(ReplyPid, LogLevel, Args, State) -> logLevel => LogLevel, isProxy => State#state.is_proxy} }. + +url_enc_params(Params) -> + EParams = maps:fold( + fun(K, {enc, V}, Acc) -> + ["&", K, "=", http_uri:encode(V) | Acc]; + (K, V, Acc) -> + ["&", K, "=", V | Acc] + end, [], Params), + erlang:iolist_to_binary([tl(EParams)]). \ No newline at end of file diff --git a/src/dperl/jobs/dperl_office_365.erl b/src/dperl/jobs/dperl_office_365.erl new file mode 100644 index 00000000..f9588024 --- /dev/null +++ b/src/dperl/jobs/dperl_office_365.erl @@ -0,0 +1,200 @@ +-module(dperl_office_365). + +-include_lib("dperl/dperl.hrl"). + +-behavior(dperl_worker). +-behavior(dperl_strategy_scr). + +-define(OFFICE_365_AUTH_CONFIG, + ?GET_CONFIG(office365AuthConfig,[], + #{auth_url =>"https://login.microsoftonline.com/common/oauth2/v2.0/authorize?response_type=code&response_mode=query", + client_id => "12345", redirect_uri => "https://localhost:8443/dderl/", client_secret => "12345", grant_type => "authorization_code", + token_url => "https://login.microsoftonline.com/common/oauth2/v2.0/token", + scope => "offline_access https://graph.microsoft.com/people.read"}, + "Office 365 (Graph API) auth config")). + +% dperl_worker exports +-export([init/1, terminate/2, handle_call/3, handle_cast/2, handle_info/2, + get_status/1, init_state/1]). + +-export([get_office_365_auth_config/0, set_token_info/1]). + +-record(state, {name, channel, is_connected = true, access_token, api_url, + infos = [], key_prefix, fetch_url}). 
+ +% dperl_strategy_scr export +-export([connect_check_src/1, get_source_events/2, connect_check_dst/1, + do_cleanup/2, do_refresh/2, + fetch_src/2, fetch_dst/2, delete_dst/2, insert_dst/3, + update_dst/3, report_status/3]). + +get_office_365_auth_config() -> + ?OFFICE_365_AUTH_CONFIG. + +get_token_info() -> + dperl_dal:read_channel(<<"avatar">>, ["office365","token"]). + +set_token_info(TokenInfo) when is_map(TokenInfo) -> + set_token_info(imem_json:encode(TokenInfo)); +set_token_info(TokenInfo) when is_binary(TokenInfo) -> + dperl_dal:create_check_channel(<<"avatar">>), + dperl_dal:write_channel(<<"avatar">>, ["office365","token"], TokenInfo). + +connect_check_src(#state{is_connected = true} = State) -> + {ok, State}; +connect_check_src(#state{is_connected = false} = State) -> + ?Info("Refreshing access token"), + #{token_url := TUrl, client_id := ClientId, client_secret := Secret, + scope := Scope} = get_office_365_auth_config(), + #{<<"refresh_token">> := RefreshToken} = get_token_info(), + Body = url_enc_params(#{"client_id" => ClientId, "scope" => {enc, Scope}, + "refresh_token" => RefreshToken, "grant_type" => "refresh_token", + "client_secret" => {enc, Secret}}), + ContentType = "application/x-www-form-urlencoded", + case httpc:request(post, {TUrl, "", ContentType, Body}, [], []) of + {ok, {{_, 200, "OK"}, _, TokenBody}} -> + TokenInfo = imem_json:decode(list_to_binary(TokenBody), [return_maps]), + set_token_info(TokenBody), + #{<<"access_token">> := AccessToken} = TokenInfo, + {ok, State#state{access_token = AccessToken, is_connected = true}}; + Error -> + ?JError("Unexpected response : ~p", [Error]), + {ok, State} + end. + +get_source_events(#state{infos = []} = State, _BulkSize) -> + {ok, sync_complete, State}; +get_source_events(#state{infos = Infos} = State, _BulkSize) -> + {ok, Infos, State#state{infos = []}}. + +connect_check_dst(State) -> {ok, State}. + +do_refresh(_State, _BulkSize) -> {error, cleanup_only}. 
+ +fetch_src({_Key, Value}, _State) -> Value. + +fetch_dst({Key, _}, State) -> + dperl_dal:read_channel(State#state.channel, Key). + +insert_dst(Key, Val, State) -> + update_dst(Key, Val, State). + +report_status(_Key, _Status, _State) -> no_op. + +do_cleanup(State, BlkCount) -> + case fetch_contacts(State, BlkCount) of + {ok, State1} -> + case State1#state.infos of + Infos when length(Infos) < BlkCount -> + {ok, finish, State1}; + _ -> + {ok, State1} + end; + {error, unauthorized} -> + ?Info("Access token has been expired"), + {ok, State#state{is_connected = false}}; + {error, Error} -> + {error, Error, State#state{is_connected = false}} + end. + +delete_dst(Key, #state{channel = Channel} = State) -> + ?JInfo("Deleting : ~p", [Key]), + dperl_dal:remove_from_channel(Channel, Key), + {false, State}. + +update_dst({Key, _}, Val, State) -> + update_dst(Key, Val, State); +update_dst(Key, Val, #state{channel = Channel} = State) when is_binary(Val) -> + dperl_dal:write_channel(Channel, Key, Val), + {false, State}; +update_dst(Key, Val, State) -> + update_dst(Key, imem_json:encode(Val), State). + +get_status(#state{}) -> #{}. + +init_state(_) -> #state{}. 
+ +init({#dperlJob{name=Name, dstArgs = #{channel := Channel}, + srcArgs = #{api_url := ApiUrl} = SrcArgs}, State}) -> + % case dperl_auth_cache:get_enc_hash(Name) of + % undefined -> + % ?JError("Encryption hash is not avaialable"), + % {stop, badarg}; + % {User, EncHash} -> + % ?JInfo("Starting with ~p's enchash...", [User]), + % imem_sec_mnesia:put_enc_hash(EncHash), + case get_token_info() of + #{<<"access_token">> := AccessToken} -> + ChannelBin = dperl_dal:to_binary(Channel), + KeyPrefix = maps:get(key_prefix, SrcArgs, []), + dperl_dal:create_check_channel(ChannelBin), + {ok, State#state{channel = ChannelBin, name = Name, api_url = ApiUrl, + key_prefix = KeyPrefix, access_token = AccessToken}}; + _ -> + ?JError("Access token not found"), + {stop, badarg} + end; +init(Args) -> + ?JError("bad start parameters ~p", [Args]), + {stop, badarg}. + +handle_call(Request, _From, State) -> + ?JWarn("Unsupported handle_call ~p", [Request]), + {reply, ok, State}. + +handle_cast(Request, State) -> + ?JWarn("Unsupported handle_cast ~p", [Request]), + {noreply, State}. + +handle_info(Request, State) -> + ?JWarn("Unsupported handle_info ~p", [Request]), + {noreply, State}. + +terminate(Reason, _State) -> + httpc:reset_cookies(?MODULE), + ?JInfo("terminate ~p", [Reason]). 
+ +%% private functions + +fetch_contacts(#state{fetch_url = undefined} = State, BlkCount) -> + % https://graph.microsoft.com/v1.0/me/contacts/?$top=100&$select=displayName&orderby=displayName + UrlParams = url_enc_params(#{"$top" => integer_to_list(BlkCount), "$orderby" => "displayName"}), + ContactsUrl = erlang:iolist_to_binary([State#state.api_url, "?", UrlParams]), + fetch_contacts(State#state{fetch_url = ContactsUrl}, BlkCount); +fetch_contacts(#state{fetch_url = FetchUrl, key_prefix = KeyPrefix} = State, _BlkCount) -> + case exec_req(FetchUrl, State#state.access_token) of + #{<<"@odata.nextLink">> := NextLink, <<"value">> := Contacts} -> + {ok, State#state{fetch_url = NextLink, infos = format_contacts(Contacts, KeyPrefix)}}; + #{<<"value">> := Contacts} -> + {ok, State#state{fetch_url = undefined, infos = format_contacts(Contacts, KeyPrefix)}}; + Error -> + Error +end. + +format_contacts([], _) -> []; +format_contacts([#{<<"displayName">> := NameBin} = Contact | Contacts], KeyPrefix) -> + Name = binary_to_list(NameBin), + Key = KeyPrefix ++ [Name], + [{Key, Contact} | format_contacts(Contacts, KeyPrefix)]. + +exec_req(Url, AccessToken) when is_binary(Url) -> + exec_req(binary_to_list(Url), AccessToken); +exec_req(Url, AccessToken) -> + AuthHeader = [{"Authorization", "Bearer " ++ binary_to_list(AccessToken)}], + case httpc:request(get, {Url, AuthHeader}, [], []) of + {ok, {{_, 200, "OK"}, _, Result}} -> + imem_json:decode(list_to_binary(Result), [return_maps]); + {ok, {{_, 401, _}, _, _}} -> + {error, unauthorized}; + Error -> + {error, Error} + end. + +url_enc_params(Params) -> + EParams = maps:fold( + fun(K, {enc, V}, Acc) -> + ["&", K, "=", http_uri:encode(V) | Acc]; + (K, V, Acc) -> + ["&", K, "=", V | Acc] + end, [], Params), + erlang:iolist_to_binary([tl(EParams)]). 
From a230743d25d1584dce4f05a48a71ab0828c43048 Mon Sep 17 00:00:00 2001 From: shamis Date: Tue, 19 May 2020 09:51:37 +0200 Subject: [PATCH 25/72] refactored office 365 login --- src/dderl_session.erl | 39 ++++++----------------------- src/dperl/jobs/dperl_office_365.erl | 27 +++++++++++++++++++- 2 files changed, 33 insertions(+), 33 deletions(-) diff --git a/src/dderl_session.erl b/src/dderl_session.erl index d9af5e3e..d4ec6747 100644 --- a/src/dderl_session.erl +++ b/src/dderl_session.erl @@ -382,33 +382,17 @@ process_call({[<<"about">>], _ReqData}, _Adapter, From, {SrcIp,_}, State) -> process_call({[<<"office_365_auth_config">>], _ReqData}, _Adapter, From, {SrcIp, _}, State) -> act_log(From, ?CMD_NOARGS, #{src => SrcIp, cmd => "about"}, State), - URLState = http_uri:encode(State#state.xsrf_token), - #{auth_url := Url, client_id := ClientId, redirect_uri := RedirectURI, - scope := Scope} = dperl_office_365:get_office_365_auth_config(), - UrlParams = url_enc_params(#{"client_id" => ClientId, "redirect_uri" => {enc, RedirectURI}, - "scope" => {enc, Scope}, "state" => URLState}), - FinalUrl = erlang:iolist_to_binary([Url, "&", UrlParams]), - reply(From, #{<<"office_365_auth_config">> => #{<<"url">> => FinalUrl}}, self()), + Url = dperl_office_365:get_authorize_url(State#state.xsrf_token), + reply(From, #{<<"office_365_auth_config">> => #{<<"url">> => Url}}, self()), State; process_call({[<<"office_365_code">>], ReqData}, _Adapter, From, {SrcIp, _}, State) -> act_log(From, ?CMD_NOARGS, #{src => SrcIp, cmd => "format_json_to_save", args => ReqData}, State), - #{<<"office_365_code">> := BodyJson} = jsx:decode(ReqData, [return_maps]), - #{<<"code">> := Code} = BodyJson, - #{token_url := TUrl, client_id := ClientId, redirect_uri := RedirectURI, - client_secret := Secret, grant_type := GrantType, - scope := Scope} = dperl_office_365:get_office_365_auth_config(), - Body = url_enc_params(#{"client_id" => ClientId, "scope" => {enc, Scope}, "code" => Code, - "redirect_uri" => 
{enc, RedirectURI}, "grant_type" => GrantType, - "client_secret" => {enc, Secret}}), - ContentType = "application/x-www-form-urlencoded", - case httpc:request(post, {TUrl, "", ContentType, Body}, [], []) of - {ok, {_, _, TokenBody}} -> - TokenInfo = imem_json:decode(list_to_binary(TokenBody), [return_maps]), - reply(From, #{<<"office_365_code">> => #{<<"status">> => <<"ok">>}}, self()), - dperl_office_365:set_token_info(TokenInfo); - {error, Error} -> - ?Error("Fetching access token : ~p", [Error]), + #{<<"office_365_code">> := #{<<"code">> := Code}} = jsx:decode(ReqData, [return_maps]), + case dperl_office_365:get_access_token(Code) of + ok -> + reply(From, #{<<"office_365_code">> => #{<<"status">> => <<"ok">>}}, self()); + {error, _Error} -> reply(From, #{<<"office_365_code">> => #{<<"error">> => <<"Fetching token failed, Try again">>}}, self()) end, State; @@ -910,12 +894,3 @@ act_log(ReplyPid, LogLevel, Args, State) -> logLevel => LogLevel, isProxy => State#state.is_proxy} }. - -url_enc_params(Params) -> - EParams = maps:fold( - fun(K, {enc, V}, Acc) -> - ["&", K, "=", http_uri:encode(V) | Acc]; - (K, V, Acc) -> - ["&", K, "=", V | Acc] - end, [], Params), - erlang:iolist_to_binary([tl(EParams)]). \ No newline at end of file diff --git a/src/dperl/jobs/dperl_office_365.erl b/src/dperl/jobs/dperl_office_365.erl index f9588024..e99e33c6 100644 --- a/src/dperl/jobs/dperl_office_365.erl +++ b/src/dperl/jobs/dperl_office_365.erl @@ -17,7 +17,7 @@ -export([init/1, terminate/2, handle_call/3, handle_cast/2, handle_info/2, get_status/1, init_state/1]). --export([get_office_365_auth_config/0, set_token_info/1]). +-export([get_authorize_url/1, get_access_token/1]). -record(state, {name, channel, is_connected = true, access_token, api_url, infos = [], key_prefix, fetch_url}). @@ -40,6 +40,31 @@ set_token_info(TokenInfo) when is_binary(TokenInfo) -> dperl_dal:create_check_channel(<<"avatar">>), dperl_dal:write_channel(<<"avatar">>, ["office365","token"], TokenInfo). 
+get_authorize_url(XSRFToken) -> + URLState = http_uri:encode(XSRFToken), + #{auth_url := Url, client_id := ClientId, redirect_uri := RedirectURI, + scope := Scope} = get_office_365_auth_config(), + UrlParams = url_enc_params(#{"client_id" => ClientId, "redirect_uri" => {enc, RedirectURI}, + "scope" => {enc, Scope}, "state" => URLState}), + erlang:iolist_to_binary([Url, "&", UrlParams]). + +get_access_token(Code) -> + #{token_url := TUrl, client_id := ClientId, redirect_uri := RedirectURI, + client_secret := Secret, grant_type := GrantType, + scope := Scope} = get_office_365_auth_config(), + Body = url_enc_params(#{"client_id" => ClientId, "scope" => {enc, Scope}, "code" => Code, + "redirect_uri" => {enc, RedirectURI}, "grant_type" => GrantType, + "client_secret" => {enc, Secret}}), + ContentType = "application/x-www-form-urlencoded", + case httpc:request(post, {TUrl, "", ContentType, Body}, [], []) of + {ok, {_, _, TokenInfo}} -> + set_token_info(list_to_binary(TokenInfo)), + ok; + {error, Error} -> + ?Error("Fetching access token : ~p", [Error]), + {error, Error} + end. + connect_check_src(#state{is_connected = true} = State) -> {ok, State}; connect_check_src(#state{is_connected = false} = State) -> From 68777262f877ce4cbcd417cf0a159259e3523f19 Mon Sep 17 00:00:00 2001 From: shamis Date: Tue, 19 May 2020 12:21:50 +0200 Subject: [PATCH 26/72] using id in the key --- src/dperl/jobs/dperl_office_365.erl | 17 +++++++---------- 1 file changed, 7 insertions(+), 10 deletions(-) diff --git a/src/dperl/jobs/dperl_office_365.erl b/src/dperl/jobs/dperl_office_365.erl index e99e33c6..c6b55bb4 100644 --- a/src/dperl/jobs/dperl_office_365.erl +++ b/src/dperl/jobs/dperl_office_365.erl @@ -109,12 +109,9 @@ report_status(_Key, _Status, _State) -> no_op. 
do_cleanup(State, BlkCount) -> case fetch_contacts(State, BlkCount) of {ok, State1} -> - case State1#state.infos of - Infos when length(Infos) < BlkCount -> - {ok, finish, State1}; - _ -> - {ok, State1} - end; + {ok, State1}; + {ok, finish, State1} -> + {ok, finish, State1}; {error, unauthorized} -> ?Info("Access token has been expired"), {ok, State#state{is_connected = false}}; @@ -191,15 +188,15 @@ fetch_contacts(#state{fetch_url = FetchUrl, key_prefix = KeyPrefix} = State, _Bl #{<<"@odata.nextLink">> := NextLink, <<"value">> := Contacts} -> {ok, State#state{fetch_url = NextLink, infos = format_contacts(Contacts, KeyPrefix)}}; #{<<"value">> := Contacts} -> - {ok, State#state{fetch_url = undefined, infos = format_contacts(Contacts, KeyPrefix)}}; + {ok, finish, State#state{fetch_url = undefined, infos = format_contacts(Contacts, KeyPrefix)}}; Error -> Error end. format_contacts([], _) -> []; -format_contacts([#{<<"displayName">> := NameBin} = Contact | Contacts], KeyPrefix) -> - Name = binary_to_list(NameBin), - Key = KeyPrefix ++ [Name], +format_contacts([#{<<"id">> := IdBin} = Contact | Contacts], KeyPrefix) -> + Id = binary_to_list(IdBin), + Key = KeyPrefix ++ [Id], [{Key, Contact} | format_contacts(Contacts, KeyPrefix)]. 
exec_req(Url, AccessToken) when is_binary(Url) -> From adef50867dfab07bad928b8ec218b06aaf31db5f Mon Sep 17 00:00:00 2001 From: Shamis Shukoor Date: Wed, 20 May 2020 12:59:42 +0200 Subject: [PATCH 27/72] set_token_info for list --- src/dperl/jobs/dperl_office_365.erl | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/src/dperl/jobs/dperl_office_365.erl b/src/dperl/jobs/dperl_office_365.erl index c6b55bb4..af4842ac 100644 --- a/src/dperl/jobs/dperl_office_365.erl +++ b/src/dperl/jobs/dperl_office_365.erl @@ -36,6 +36,8 @@ get_token_info() -> set_token_info(TokenInfo) when is_map(TokenInfo) -> set_token_info(imem_json:encode(TokenInfo)); +set_token_info(TokenInfo) when is_list(TokenInfo) -> + set_token_info(list_to_binary(TokenInfo)); set_token_info(TokenInfo) when is_binary(TokenInfo) -> dperl_dal:create_check_channel(<<"avatar">>), dperl_dal:write_channel(<<"avatar">>, ["office365","token"], TokenInfo). @@ -58,7 +60,7 @@ get_access_token(Code) -> ContentType = "application/x-www-form-urlencoded", case httpc:request(post, {TUrl, "", ContentType, Body}, [], []) of {ok, {_, _, TokenInfo}} -> - set_token_info(list_to_binary(TokenInfo)), + set_token_info(TokenInfo), ok; {error, Error} -> ?Error("Fetching access token : ~p", [Error]), From 61ffa66710433dc63aa9443ab829feedcda2f6ac Mon Sep 17 00:00:00 2001 From: shamis Date: Wed, 20 May 2020 17:09:25 +0200 Subject: [PATCH 28/72] cleanup WIP --- src/dperl/dperl_strategy_scr.erl | 44 +++++++++----- src/dperl/jobs/dperl_office_365.erl | 89 ++++++++++++++++++++--------- 2 files changed, 92 insertions(+), 41 deletions(-) diff --git a/src/dperl/dperl_strategy_scr.erl b/src/dperl/dperl_strategy_scr.erl index da45ec4f..36b55313 100644 --- a/src/dperl/dperl_strategy_scr.erl +++ b/src/dperl/dperl_strategy_scr.erl @@ -250,8 +250,8 @@ execute(cleanup, Mod, Job, State, #{cleanup := true} = Args) -> true -> #{minKey := MinKey, maxKey := MaxKey, lastKey := LastKey} = CleanupState, - #{deletes := Deletes, inserts := 
Inserts, - differences := Diffs, lastKey := NextLastKey} = + {#{deletes := Deletes, inserts := Inserts, + differences := Diffs, lastKey := NextLastKey}, State2} = cleanup_refresh_collect( Mod, #cleanup_ctx{minKey = MinKey, maxKey = MaxKey, @@ -279,8 +279,8 @@ execute(cleanup, Mod, Job, State, #{cleanup := true} = Args) -> cleanup_log("Missing", Inserts), cleanup_log("Difference", Diffs), case erlang:function_exported(Mod, do_cleanup, 5) of - true -> [Deletes, Inserts, Diffs, NextLastKey == MinKey, State1]; - false -> [Deletes, Inserts, NextLastKey == MinKey, State1] + true -> [Deletes, Inserts, Diffs, NextLastKey == MinKey, State2]; + false -> [Deletes, Inserts, NextLastKey == MinKey, State2] end end, case apply(Mod, do_cleanup, DoCleanupArgs) of @@ -635,11 +635,27 @@ cleanup_refresh_collect(Mod, LastKey >= MaxKey -> MinKey; % out of key bounds by re-config true -> LastKey end, - SrcKeys = Mod:load_src_after_key(CurKey, BulkCnt, State), - DstKeys = Mod:load_dst_after_key(CurKey, BulkCnt, State), - cleanup_refresh_compare(CleanupCtx#cleanup_ctx{ + {SrcKeys, State2} = + case Mod:load_src_after_key(CurKey, BulkCnt, State) of + {ok, SKeys, State1} -> {SKeys, State1}; + {error, Error, State1} -> + ?JError("cleanup failed at load_src_after_key : ~p", [Error]), + dperl_dal:job_error(<<"cleanup">>, <<"load_src_after_key">>, Error), + error({step_failed, State1}); + SKeys -> {SKeys, State} + end, + {DstKeys, State4} = + case Mod:load_dst_after_key(CurKey, BulkCnt, State2) of + {ok, DKeys, State3} -> {DKeys, State3}; + {error, Error1, State3} -> + ?JError("cleanup failed at load_dst_after_key : ~p", [Error1]), + dperl_dal:job_error(<<"cleanup">>, <<"load_dst_after_key">>, Error1), + error({step_failed, State3}); + DKeys -> {DKeys, State2} + end, + {cleanup_refresh_compare(CleanupCtx#cleanup_ctx{ srcKeys = SrcKeys, srcCount = length(SrcKeys), - dstKeys = DstKeys, dstCount = length(DstKeys), lastKey = CurKey}). 
+ dstKeys = DstKeys, dstCount = length(DstKeys), lastKey = CurKey}), State4}. -spec cleanup_refresh_compare(#cleanup_ctx{}) -> #{deletes => list(), differences => list(), inserts => list(), lastKey => any()}. cleanup_refresh_compare(#cleanup_ctx{ @@ -734,7 +750,7 @@ cleanup_refresh_compare_test() -> SrcKeys = lists:usort([rand:uniform(3000) || _ <- lists:seq(1, SrcCount)]), DstCount = rand:uniform(1000), DstKeys = lists:usort([rand:uniform(3000) || _ <- lists:seq(1, DstCount)]), - #{deletes := Dels, inserts := Ins} = + {#{deletes := Dels, inserts := Ins}, _} = cleanup_refresh_collect(?MODULE, #cleanup_ctx{minKey = -1, maxKey = <<255>>, lastKey = 0, bulkCount = BulkCnt}, @@ -781,7 +797,7 @@ complete_cleanup_refresh(AllSrcKeys, AllDstKeys, BulkCnt) -> cleanup_refresh_loop(_, -1, _, Acc) -> Acc; cleanup_refresh_loop(Ctx, CurKey, AllKeys, Acc) -> - #{deletes := Dels, differences := Diffs, inserts := Ins, lastKey := LastKey} = + {#{deletes := Dels, differences := Diffs, inserts := Ins, lastKey := LastKey}, _} = cleanup_refresh_collect(?MODULE, Ctx#cleanup_ctx{lastKey = CurKey}, AllKeys), NewAcc = Acc#{deletes => Dels ++ maps:get(deletes, Acc, []), differences => Diffs ++ maps:get(differences, Acc, []), @@ -846,7 +862,7 @@ cleanup_refresh_boundary_test() -> cleanup_refresh_only_dels_test() -> AllKeys = lists:sort(maps:to_list(maps:from_list([{rand:uniform(5000), rand:uniform(5000)} || _ <- lists:seq(1, 2000)]))), DeleteKeys = [{6000, 6}, {7000, 7}, {8000, 8}], - #{inserts := Ins, deletes := Dels, differences := Diffs} = complete_cleanup_refresh(AllKeys, AllKeys ++ DeleteKeys, 2000), + {#{inserts := Ins, deletes := Dels, differences := Diffs}, _} = complete_cleanup_refresh(AllKeys, AllKeys ++ DeleteKeys, 2000), ?assertEqual([], Diffs), ?assertEqual([], Ins), ?assertEqual([6000, 7000, 8000], Dels). 
@@ -854,14 +870,14 @@ cleanup_refresh_only_dels_test() -> cleanup_refresh_only_ins_test() -> AllKeys = lists:sort(maps:to_list(maps:from_list([{rand:uniform(5000), rand:uniform(5000)} || _ <- lists:seq(1, 2000)]))), InsertKeys = [{6000, 6}, {7000, 7}, {8000, 8}], - #{inserts := Ins, deletes := Dels, differences := Diffs} = complete_cleanup_refresh(AllKeys ++ InsertKeys, AllKeys, 2000), + {#{inserts := Ins, deletes := Dels, differences := Diffs}, _} = complete_cleanup_refresh(AllKeys ++ InsertKeys, AllKeys, 2000), ?assertEqual([], Diffs), ?assertEqual([6000, 7000, 8000], Ins), ?assertEqual([], Dels). cleanup_refresh_no_op_test() -> AllKeys = lists:sort(maps:to_list(maps:from_list([{rand:uniform(5000), rand:uniform(5000)} || _ <- lists:seq(1, 2000)]))), - #{inserts := Ins, deletes := Dels, differences := Diffs} = complete_cleanup_refresh(AllKeys, AllKeys, 2000), + {#{inserts := Ins, deletes := Dels, differences := Diffs}, _} = complete_cleanup_refresh(AllKeys, AllKeys, 2000), ?assertEqual([], Diffs), ?assertEqual([], Ins), ?assertEqual([], Dels). @@ -869,7 +885,7 @@ cleanup_refresh_no_op_test() -> cleanup_refresh_no_diff_test() -> AllSrcKeys = lists:sort(maps:to_list(maps:from_list([{rand:uniform(5000), 1} || _ <- lists:seq(1, 2000)]))), AllDstKeys = lists:sort(maps:to_list(maps:from_list([{rand:uniform(5000), 1} || _ <- lists:seq(1, 2000)]))), - #{differences := Diffs} = complete_cleanup_refresh(AllSrcKeys, AllDstKeys, 2000), + {#{differences := Diffs}, _} = complete_cleanup_refresh(AllSrcKeys, AllDstKeys, 2000), ?assertEqual([], Diffs). -endif. 
diff --git a/src/dperl/jobs/dperl_office_365.erl b/src/dperl/jobs/dperl_office_365.erl index c6b55bb4..abe64a32 100644 --- a/src/dperl/jobs/dperl_office_365.erl +++ b/src/dperl/jobs/dperl_office_365.erl @@ -24,7 +24,7 @@ % dperl_strategy_scr export -export([connect_check_src/1, get_source_events/2, connect_check_dst/1, - do_cleanup/2, do_refresh/2, + do_cleanup/5, do_refresh/2, load_src_after_key/3, load_dst_after_key/3, fetch_src/2, fetch_dst/2, delete_dst/2, insert_dst/3, update_dst/3, report_status/3]). @@ -36,6 +36,8 @@ get_token_info() -> set_token_info(TokenInfo) when is_map(TokenInfo) -> set_token_info(imem_json:encode(TokenInfo)); +set_token_info(TokenInfo) when is_list(TokenInfo) -> + set_token_info(list_to_binary(TokenInfo)); set_token_info(TokenInfo) when is_binary(TokenInfo) -> dperl_dal:create_check_channel(<<"avatar">>), dperl_dal:write_channel(<<"avatar">>, ["office365","token"], TokenInfo). @@ -58,7 +60,7 @@ get_access_token(Code) -> ContentType = "application/x-www-form-urlencoded", case httpc:request(post, {TUrl, "", ContentType, Body}, [], []) of {ok, {_, _, TokenInfo}} -> - set_token_info(list_to_binary(TokenInfo)), + set_token_info(TokenInfo), ok; {error, Error} -> ?Error("Fetching access token : ~p", [Error]), @@ -96,9 +98,15 @@ connect_check_dst(State) -> {ok, State}. do_refresh(_State, _BulkSize) -> {error, cleanup_only}. -fetch_src({_Key, Value}, _State) -> Value. +fetch_src(Key, #state{api_url = ApiUrl} = State) -> + Id = Key -- State#state.key_prefix, + FetchUrl = erlang:iolist_to_binary([ApiUrl, Id]), + case exec_req(FetchUrl, State#state.access_token) of + Value when is_map(Value) -> Value; + {error, Error} -> {error, Error, State} + end. -fetch_dst({Key, _}, State) -> +fetch_dst(Key, State) -> dperl_dal:read_channel(State#state.channel, Key). insert_dst(Key, Val, State) -> @@ -106,19 +114,46 @@ insert_dst(Key, Val, State) -> report_status(_Key, _Status, _State) -> no_op. 
-do_cleanup(State, BlkCount) -> - case fetch_contacts(State, BlkCount) of - {ok, State1} -> - {ok, State1}; - {ok, finish, State1} -> - {ok, finish, State1}; +load_dst_after_key(CurKey, BlkCount, #state{channel = Channel}) -> + dperl_dal:read_gt(Channel, CurKey, BlkCount). + +load_src_after_key(CurKey, BlkCount, #state{fetch_url = undefined} = State) -> + % https://graph.microsoft.com/v1.0/me/contacts/?$top=100&$select=displayName&orderby=displayName + UrlParams = url_enc_params(#{"$top" => integer_to_list(BlkCount), "$orderby" => "id"}), + ContactsUrl = erlang:iolist_to_binary([State#state.api_url, "?", UrlParams]), + load_src_after_key(CurKey, BlkCount, State#state{fetch_url = ContactsUrl}); +load_src_after_key(_CurKey, _BlkCount, #state{fetch_url = FetchUrl, key_prefix = KeyPrefix} = State) -> + case exec_req(FetchUrl, State#state.access_token) of + #{<<"@odata.nextLink">> := NextLink, <<"value">> := Contacts} -> + {ok, format_contacts(Contacts, KeyPrefix), State#state{fetch_url = NextLink}}; + #{<<"value">> := Contacts} -> + {ok, format_contacts(Contacts, KeyPrefix), State#state{fetch_url = undefined}}; {error, unauthorized} -> - ?Info("Access token has been expired"), - {ok, State#state{is_connected = false}}; + {error, unauthorized, State#state{is_connected = false}}; {error, Error} -> - {error, Error, State#state{is_connected = false}} + {error, Error, State} + end. + +do_cleanup(Deletes, Inserts, Diffs, IsFinished, State) -> + NewState = State#state{infos = Inserts ++ Diffs ++ Deletes}, + if IsFinished -> {ok, finish, NewState}; + true -> {ok, NewState} end. + +% do_cleanup(State, BlkCount) -> +% case fetch_contacts(State, BlkCount) of +% {ok, State1} -> +% {ok, State1}; +% {ok, finish, State1} -> +% {ok, finish, State1}; +% {error, unauthorized} -> +% ?Info("Access token has been expired"), +% {ok, State#state{is_connected = false}}; +% {error, Error} -> +% {error, Error, State#state{is_connected = false}} +% end. 
+ delete_dst(Key, #state{channel = Channel} = State) -> ?JInfo("Deleting : ~p", [Key]), dperl_dal:remove_from_channel(Channel, Key), @@ -178,20 +213,20 @@ terminate(Reason, _State) -> %% private functions -fetch_contacts(#state{fetch_url = undefined} = State, BlkCount) -> - % https://graph.microsoft.com/v1.0/me/contacts/?$top=100&$select=displayName&orderby=displayName - UrlParams = url_enc_params(#{"$top" => integer_to_list(BlkCount), "$orderby" => "displayName"}), - ContactsUrl = erlang:iolist_to_binary([State#state.api_url, "?", UrlParams]), - fetch_contacts(State#state{fetch_url = ContactsUrl}, BlkCount); -fetch_contacts(#state{fetch_url = FetchUrl, key_prefix = KeyPrefix} = State, _BlkCount) -> - case exec_req(FetchUrl, State#state.access_token) of - #{<<"@odata.nextLink">> := NextLink, <<"value">> := Contacts} -> - {ok, State#state{fetch_url = NextLink, infos = format_contacts(Contacts, KeyPrefix)}}; - #{<<"value">> := Contacts} -> - {ok, finish, State#state{fetch_url = undefined, infos = format_contacts(Contacts, KeyPrefix)}}; - Error -> - Error -end. +% fetch_contacts(#state{fetch_url = undefined} = State, BlkCount) -> +% % https://graph.microsoft.com/v1.0/me/contacts/?$top=100&$select=displayName&orderby=displayName +% UrlParams = url_enc_params(#{"$top" => integer_to_list(BlkCount), "$orderby" => "displayName"}), +% ContactsUrl = erlang:iolist_to_binary([State#state.api_url, "?", UrlParams]), +% fetch_contacts(State#state{fetch_url = ContactsUrl}, BlkCount); +% fetch_contacts(#state{fetch_url = FetchUrl, key_prefix = KeyPrefix} = State, _BlkCount) -> +% case exec_req(FetchUrl, State#state.access_token) of +% #{<<"@odata.nextLink">> := NextLink, <<"value">> := Contacts} -> +% {ok, State#state{fetch_url = NextLink, infos = format_contacts(Contacts, KeyPrefix)}}; +% #{<<"value">> := Contacts} -> +% {ok, finish, State#state{fetch_url = undefined, infos = format_contacts(Contacts, KeyPrefix)}}; +% Error -> +% Error +% end. 
format_contacts([], _) -> []; format_contacts([#{<<"id">> := IdBin} = Contact | Contacts], KeyPrefix) -> From 401e278c53a0d94c2ae33b7ed2f1d407d6417ff2 Mon Sep 17 00:00:00 2001 From: shamis Date: Thu, 21 May 2020 13:35:45 +0200 Subject: [PATCH 29/72] contacts cleanup implemented --- src/dperl/jobs/dperl_office_365.erl | 98 ++++++++++++++--------------- 1 file changed, 48 insertions(+), 50 deletions(-) diff --git a/src/dperl/jobs/dperl_office_365.erl b/src/dperl/jobs/dperl_office_365.erl index abe64a32..fef40b22 100644 --- a/src/dperl/jobs/dperl_office_365.erl +++ b/src/dperl/jobs/dperl_office_365.erl @@ -20,7 +20,7 @@ -export([get_authorize_url/1, get_access_token/1]). -record(state, {name, channel, is_connected = true, access_token, api_url, - infos = [], key_prefix, fetch_url}). + contacts = [], key_prefix, fetch_url, cl_contacts = []}). % dperl_strategy_scr export -export([connect_check_src/1, get_source_events/2, connect_check_dst/1, @@ -89,21 +89,19 @@ connect_check_src(#state{is_connected = false} = State) -> {ok, State} end. -get_source_events(#state{infos = []} = State, _BulkSize) -> +get_source_events(#state{contacts = []} = State, _BulkSize) -> {ok, sync_complete, State}; -get_source_events(#state{infos = Infos} = State, _BulkSize) -> - {ok, Infos, State#state{infos = []}}. +get_source_events(#state{contacts = Contacts} = State, _BulkSize) -> + {ok, Contacts, State#state{contacts = []}}. connect_check_dst(State) -> {ok, State}. do_refresh(_State, _BulkSize) -> {error, cleanup_only}. -fetch_src(Key, #state{api_url = ApiUrl} = State) -> - Id = Key -- State#state.key_prefix, - FetchUrl = erlang:iolist_to_binary([ApiUrl, Id]), - case exec_req(FetchUrl, State#state.access_token) of - Value when is_map(Value) -> Value; - {error, Error} -> {error, Error, State} +fetch_src(Key, #state{cl_contacts = Contacts}) -> + case lists:keyfind(Key, 1, Contacts) of + {Key, Contact} -> Contact; + false -> ?NOT_FOUND end. 
fetch_dst(Key, State) -> @@ -119,43 +117,30 @@ load_dst_after_key(CurKey, BlkCount, #state{channel = Channel}) -> load_src_after_key(CurKey, BlkCount, #state{fetch_url = undefined} = State) -> % https://graph.microsoft.com/v1.0/me/contacts/?$top=100&$select=displayName&orderby=displayName - UrlParams = url_enc_params(#{"$top" => integer_to_list(BlkCount), "$orderby" => "id"}), + UrlParams = url_enc_params(#{"$top" => integer_to_list(BlkCount)}), ContactsUrl = erlang:iolist_to_binary([State#state.api_url, "?", UrlParams]), load_src_after_key(CurKey, BlkCount, State#state{fetch_url = ContactsUrl}); -load_src_after_key(_CurKey, _BlkCount, #state{fetch_url = FetchUrl, key_prefix = KeyPrefix} = State) -> - case exec_req(FetchUrl, State#state.access_token) of - #{<<"@odata.nextLink">> := NextLink, <<"value">> := Contacts} -> - {ok, format_contacts(Contacts, KeyPrefix), State#state{fetch_url = NextLink}}; - #{<<"value">> := Contacts} -> - {ok, format_contacts(Contacts, KeyPrefix), State#state{fetch_url = undefined}}; +load_src_after_key(CurKey, BlkCount, #state{cl_contacts = [], key_prefix = KeyPrefix, + access_token = AccessToken, fetch_url = FetchUrl} = State) -> + % fetch all contacts + case fetch_all_contacts(FetchUrl, AccessToken, KeyPrefix) of + {ok, Contacts} -> + load_src_after_key(CurKey, BlkCount, State#state{cl_contacts = Contacts}); {error, unauthorized} -> {error, unauthorized, State#state{is_connected = false}}; {error, Error} -> {error, Error, State} - end. + end; +load_src_after_key(CurKey, BlkCount, #state{cl_contacts = Contacts} = State) -> + {ok, get_contacts_gt(CurKey, BlkCount, Contacts), State}. do_cleanup(Deletes, Inserts, Diffs, IsFinished, State) -> - NewState = State#state{infos = Inserts ++ Diffs ++ Deletes}, - if IsFinished -> {ok, finish, NewState}; + NewState = State#state{contacts = Inserts ++ Diffs ++ Deletes}, + if IsFinished -> {ok, finish, NewState#state{cl_contacts = []}}; true -> {ok, NewState} end. 
- -% do_cleanup(State, BlkCount) -> -% case fetch_contacts(State, BlkCount) of -% {ok, State1} -> -% {ok, State1}; -% {ok, finish, State1} -> -% {ok, finish, State1}; -% {error, unauthorized} -> -% ?Info("Access token has been expired"), -% {ok, State#state{is_connected = false}}; -% {error, Error} -> -% {error, Error, State#state{is_connected = false}} -% end. - delete_dst(Key, #state{channel = Channel} = State) -> - ?JInfo("Deleting : ~p", [Key]), dperl_dal:remove_from_channel(Channel, Key), {false, State}. @@ -213,27 +198,40 @@ terminate(Reason, _State) -> %% private functions -% fetch_contacts(#state{fetch_url = undefined} = State, BlkCount) -> -% % https://graph.microsoft.com/v1.0/me/contacts/?$top=100&$select=displayName&orderby=displayName -% UrlParams = url_enc_params(#{"$top" => integer_to_list(BlkCount), "$orderby" => "displayName"}), -% ContactsUrl = erlang:iolist_to_binary([State#state.api_url, "?", UrlParams]), -% fetch_contacts(State#state{fetch_url = ContactsUrl}, BlkCount); -% fetch_contacts(#state{fetch_url = FetchUrl, key_prefix = KeyPrefix} = State, _BlkCount) -> -% case exec_req(FetchUrl, State#state.access_token) of -% #{<<"@odata.nextLink">> := NextLink, <<"value">> := Contacts} -> -% {ok, State#state{fetch_url = NextLink, infos = format_contacts(Contacts, KeyPrefix)}}; -% #{<<"value">> := Contacts} -> -% {ok, finish, State#state{fetch_url = undefined, infos = format_contacts(Contacts, KeyPrefix)}}; -% Error -> -% Error -% end. - format_contacts([], _) -> []; format_contacts([#{<<"id">> := IdBin} = Contact | Contacts], KeyPrefix) -> Id = binary_to_list(IdBin), Key = KeyPrefix ++ [Id], [{Key, Contact} | format_contacts(Contacts, KeyPrefix)]. +fetch_all_contacts(Url, AccessToken, KeyPrefix) -> + fetch_all_contacts(Url, AccessToken, KeyPrefix, []). 
+ +fetch_all_contacts(Url, AccessToken, KeyPrefix, AccContacts) -> + ?JTrace("Fetching contacts with url : ~s", [Url]), + ?JTrace("Fetched contacts : ~p", [length(AccContacts)]), + case exec_req(Url, AccessToken) of + #{<<"@odata.nextLink">> := NextUrl, <<"value">> := Contacts} -> + FContacts = format_contacts(Contacts, KeyPrefix), + fetch_all_contacts(NextUrl, AccessToken, KeyPrefix, lists:append(FContacts, AccContacts)); + #{<<"value">> := Contacts} -> + FContacts = format_contacts(Contacts, KeyPrefix), + {ok, lists:keysort(1, lists:append(FContacts, AccContacts))}; + {error, Error} -> + {error, Error} + end. + +get_contacts_gt(CurKey, BlkCount, Contacts) -> + get_contacts_gt(CurKey, BlkCount, Contacts, []). + +get_contacts_gt(_CurKey, _BlkCount, [], Acc) -> lists:reverse(Acc); +get_contacts_gt(_CurKey, BlkCount, _Contacts, Acc) when length(Acc) == BlkCount -> + lists:reverse(Acc); +get_contacts_gt(CurKey, BlkCount, [{Key, _} | Contacts], Acc) when Key =< CurKey -> + get_contacts_gt(CurKey, BlkCount, Contacts, Acc); +get_contacts_gt(CurKey, BlkCount, [Contact | Contacts], Acc) -> + get_contacts_gt(CurKey, BlkCount, Contacts, [Contact | Acc]). + exec_req(Url, AccessToken) when is_binary(Url) -> exec_req(binary_to_list(Url), AccessToken); exec_req(Url, AccessToken) -> From be61400ddf024fe9f7022380d59892a65ccc07b5 Mon Sep 17 00:00:00 2001 From: shamis Date: Thu, 21 May 2020 13:37:06 +0200 Subject: [PATCH 30/72] passing the new record --- src/dperl/dperl_auth_cache.erl | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/src/dperl/dperl_auth_cache.erl b/src/dperl/dperl_auth_cache.erl index 221d5647..efc1e878 100644 --- a/src/dperl/dperl_auth_cache.erl +++ b/src/dperl/dperl_auth_cache.erl @@ -2,6 +2,8 @@ -behaviour(gen_server). +-include("dperl.hrl"). + -export([start_link/0, set_enc_hash/3, get_enc_hash/1, @@ -21,8 +23,10 @@ start_link() -> init([]) -> {ok, #{}}. 
-handle_call({setEncHash, JobOrServiceName, User, EncHash}, _From, State) -> - {reply, ok, State#{JobOrServiceName => {User, EncHash}}}; +handle_call({setEncHash, #dperlJob{name = JobName}, User, EncHash}, _From, State) -> + {reply, ok, State#{JobName => {User, EncHash}}}; +handle_call({setEncHash, #dperlService{name = ServiceName}, User, EncHash}, _From, State) -> + {reply, ok, State#{ServiceName => {User, EncHash}}}; handle_call({getEncHash, JobOrServiceName}, _From, State) -> case State of #{JobOrServiceName := {User, EncHash}} -> From 217df1bba0b80e23806a668f8d3bb8e3805594df Mon Sep 17 00:00:00 2001 From: shamis Date: Fri, 22 May 2020 10:06:21 +0200 Subject: [PATCH 31/72] cleanup flag added --- src/dperl/jobs/dperl_office_365.erl | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/src/dperl/jobs/dperl_office_365.erl b/src/dperl/jobs/dperl_office_365.erl index fef40b22..e2bcc754 100644 --- a/src/dperl/jobs/dperl_office_365.erl +++ b/src/dperl/jobs/dperl_office_365.erl @@ -20,7 +20,8 @@ -export([get_authorize_url/1, get_access_token/1]). -record(state, {name, channel, is_connected = true, access_token, api_url, - contacts = [], key_prefix, fetch_url, cl_contacts = []}). + contacts = [], key_prefix, fetch_url, cl_contacts = [], + is_cleanup_finished = true}). 
% dperl_strategy_scr export -export([connect_check_src/1, get_source_events/2, connect_check_dst/1, @@ -120,12 +121,12 @@ load_src_after_key(CurKey, BlkCount, #state{fetch_url = undefined} = State) -> UrlParams = url_enc_params(#{"$top" => integer_to_list(BlkCount)}), ContactsUrl = erlang:iolist_to_binary([State#state.api_url, "?", UrlParams]), load_src_after_key(CurKey, BlkCount, State#state{fetch_url = ContactsUrl}); -load_src_after_key(CurKey, BlkCount, #state{cl_contacts = [], key_prefix = KeyPrefix, +load_src_after_key(CurKey, BlkCount, #state{is_cleanup_finished = true, key_prefix = KeyPrefix, access_token = AccessToken, fetch_url = FetchUrl} = State) -> % fetch all contacts case fetch_all_contacts(FetchUrl, AccessToken, KeyPrefix) of {ok, Contacts} -> - load_src_after_key(CurKey, BlkCount, State#state{cl_contacts = Contacts}); + load_src_after_key(CurKey, BlkCount, State#state{cl_contacts = Contacts, is_cleanup_finished = false}); {error, unauthorized} -> {error, unauthorized, State#state{is_connected = false}}; {error, Error} -> @@ -136,7 +137,7 @@ load_src_after_key(CurKey, BlkCount, #state{cl_contacts = Contacts} = State) -> do_cleanup(Deletes, Inserts, Diffs, IsFinished, State) -> NewState = State#state{contacts = Inserts ++ Diffs ++ Deletes}, - if IsFinished -> {ok, finish, NewState#state{cl_contacts = []}}; + if IsFinished -> {ok, finish, NewState#state{is_cleanup_finished = true}}; true -> {ok, NewState} end. 
From 8036a9ae6a8b01ce9fbb9aa326238cf59e4662b9 Mon Sep 17 00:00:00 2001 From: shamis Date: Fri, 22 May 2020 10:56:46 +0200 Subject: [PATCH 32/72] refreshing access token silently --- src/dperl/jobs/dperl_office_365.erl | 12 +++++++++--- 1 file changed, 9 insertions(+), 3 deletions(-) diff --git a/src/dperl/jobs/dperl_office_365.erl b/src/dperl/jobs/dperl_office_365.erl index e2bcc754..d62efe43 100644 --- a/src/dperl/jobs/dperl_office_365.erl +++ b/src/dperl/jobs/dperl_office_365.erl @@ -71,7 +71,7 @@ get_access_token(Code) -> connect_check_src(#state{is_connected = true} = State) -> {ok, State}; connect_check_src(#state{is_connected = false} = State) -> - ?Info("Refreshing access token"), + ?JTrace("Refreshing access token"), #{token_url := TUrl, client_id := ClientId, client_secret := Secret, scope := Scope} = get_office_365_auth_config(), #{<<"refresh_token">> := RefreshToken} = get_token_info(), @@ -84,10 +84,11 @@ connect_check_src(#state{is_connected = false} = State) -> TokenInfo = imem_json:decode(list_to_binary(TokenBody), [return_maps]), set_token_info(TokenBody), #{<<"access_token">> := AccessToken} = TokenInfo, + ?JInfo("new access token fetched"), {ok, State#state{access_token = AccessToken, is_connected = true}}; Error -> ?JError("Unexpected response : ~p", [Error]), - {ok, State} + {error, Error, State} end. 
get_source_events(#state{contacts = []} = State, _BulkSize) -> @@ -128,7 +129,12 @@ load_src_after_key(CurKey, BlkCount, #state{is_cleanup_finished = true, key_pref {ok, Contacts} -> load_src_after_key(CurKey, BlkCount, State#state{cl_contacts = Contacts, is_cleanup_finished = false}); {error, unauthorized} -> - {error, unauthorized, State#state{is_connected = false}}; + case connect_check_src(State#state{is_connected = false}) of + {ok, State1} -> + load_src_after_key(CurKey, BlkCount, State1); + {error, Error, State1} -> + {error, Error, State1} + end; {error, Error} -> {error, Error, State} end; From 497375272508d47444a153947afdcd2de24c21c7 Mon Sep 17 00:00:00 2001 From: shamis Date: Fri, 22 May 2020 13:35:25 +0200 Subject: [PATCH 33/72] write to push_channel --- src/dperl/jobs/dperl_office_365.erl | 35 ++++++++++++++++------------- 1 file changed, 20 insertions(+), 15 deletions(-) diff --git a/src/dperl/jobs/dperl_office_365.erl b/src/dperl/jobs/dperl_office_365.erl index d62efe43..b0861e94 100644 --- a/src/dperl/jobs/dperl_office_365.erl +++ b/src/dperl/jobs/dperl_office_365.erl @@ -21,7 +21,7 @@ -record(state, {name, channel, is_connected = true, access_token, api_url, contacts = [], key_prefix, fetch_url, cl_contacts = [], - is_cleanup_finished = true}). + is_cleanup_finished = true, push_channel, type = pull}). % dperl_strategy_scr export -export([connect_check_src/1, get_source_events/2, connect_check_dst/1, @@ -112,6 +112,20 @@ fetch_dst(Key, State) -> insert_dst(Key, Val, State) -> update_dst(Key, Val, State). +delete_dst(Key, #state{channel = Channel} = State) -> + dperl_dal:remove_from_channel(Channel, Key), + dperl_dal:remove_from_channel(State#state.push_channel, Key), + {false, State}. 
+ +update_dst({Key, _}, Val, State) -> + update_dst(Key, Val, State); +update_dst(Key, Val, #state{channel = Channel} = State) when is_binary(Val) -> + dperl_dal:write_channel(Channel, Key, Val), + dperl_dal:write_channel(State#state.push_channel, Key, Val), + {false, State}; +update_dst(Key, Val, State) -> + update_dst(Key, imem_json:encode(Val), State). + report_status(_Key, _Status, _State) -> no_op. load_dst_after_key(CurKey, BlkCount, #state{channel = Channel}) -> @@ -147,23 +161,11 @@ do_cleanup(Deletes, Inserts, Diffs, IsFinished, State) -> true -> {ok, NewState} end. -delete_dst(Key, #state{channel = Channel} = State) -> - dperl_dal:remove_from_channel(Channel, Key), - {false, State}. - -update_dst({Key, _}, Val, State) -> - update_dst(Key, Val, State); -update_dst(Key, Val, #state{channel = Channel} = State) when is_binary(Val) -> - dperl_dal:write_channel(Channel, Key, Val), - {false, State}; -update_dst(Key, Val, State) -> - update_dst(Key, imem_json:encode(Val), State). - get_status(#state{}) -> #{}. init_state(_) -> #state{}. 
-init({#dperlJob{name=Name, dstArgs = #{channel := Channel}, +init({#dperlJob{name=Name, dstArgs = #{channel := Channel, push_channel := PChannel}, srcArgs = #{api_url := ApiUrl} = SrcArgs}, State}) -> % case dperl_auth_cache:get_enc_hash(Name) of % undefined -> @@ -175,10 +177,13 @@ init({#dperlJob{name=Name, dstArgs = #{channel := Channel}, case get_token_info() of #{<<"access_token">> := AccessToken} -> ChannelBin = dperl_dal:to_binary(Channel), + PChannelBin = dperl_dal:to_binary(PChannel), KeyPrefix = maps:get(key_prefix, SrcArgs, []), dperl_dal:create_check_channel(ChannelBin), + dperl_dal:create_check_channel(PChannelBin), {ok, State#state{channel = ChannelBin, name = Name, api_url = ApiUrl, - key_prefix = KeyPrefix, access_token = AccessToken}}; + key_prefix = KeyPrefix, access_token = AccessToken, + push_channel = PChannelBin}}; _ -> ?JError("Access token not found"), {stop, badarg} From 36845c0efa93f3cc47af472e6b49113feeb7856b Mon Sep 17 00:00:00 2001 From: shamis Date: Fri, 22 May 2020 15:36:44 +0200 Subject: [PATCH 34/72] error handling and formatted code --- priv/dev/static/scripts/login.js | 13 ++++++++++--- 1 file changed, 10 insertions(+), 3 deletions(-) diff --git a/priv/dev/static/scripts/login.js b/priv/dev/static/scripts/login.js index 85b8c898..982a7cfc 100644 --- a/priv/dev/static/scripts/login.js +++ b/priv/dev/static/scripts/login.js @@ -30,8 +30,15 @@ const state = url.searchParams.get('state'); if(code) { dderlState.xsrfToken = state; const body = { 'office_365_code': { 'code': code, 'state': state } }; - ajaxCall(null, 'office_365_code', body, 'office_365_code', function () { window.close(); }, - function () { alert('error!!!!'); }); + ajaxCall(null, 'office_365_code', body, 'office_365_code', + function () { + window.close(); + }, + function (error) { + alert('Error fetching access token : ' + error); + console.log('Error fetching access token', error); + window.close(); + }); } else { const error = url.searchParams.get('error'); const 
errorDesc = url.searchParams.get('error_description'); @@ -419,7 +426,7 @@ export function change_login_password(loggedInUser, shouldConnect) { } export function authorize_office() { - ajaxCall(null, 'office_365_auth_config', {}, 'office_365_auth_config', function(auth_config) { + ajaxCall(null, 'office_365_auth_config', {}, 'office_365_auth_config', function (auth_config) { const params = 'scrollbars=no,resizable=no,status=no,location=no,toolbar=no,menubar=no,width=500,height=600,left=100,top=100'; window.open(auth_config.url, 'Office 365 login', params); }); From 29cbefa9ceb2e6b1d4a5de5e42764020cd3e8fec Mon Sep 17 00:00:00 2001 From: shamis Date: Fri, 22 May 2020 19:02:58 +0200 Subject: [PATCH 35/72] push type job implemented --- src/dperl/jobs/dperl_office_365.erl | 114 +++++++++++++++++++++++++--- 1 file changed, 102 insertions(+), 12 deletions(-) diff --git a/src/dperl/jobs/dperl_office_365.erl b/src/dperl/jobs/dperl_office_365.erl index b0861e94..22127be7 100644 --- a/src/dperl/jobs/dperl_office_365.erl +++ b/src/dperl/jobs/dperl_office_365.erl @@ -21,7 +21,8 @@ -record(state, {name, channel, is_connected = true, access_token, api_url, contacts = [], key_prefix, fetch_url, cl_contacts = [], - is_cleanup_finished = true, push_channel, type = pull}). + is_cleanup_finished = true, push_channel, type = pull, + audit_start_time = {0,0}, first_sync = true}). % dperl_strategy_scr export -export([connect_check_src/1, get_source_events/2, connect_check_dst/1, @@ -91,6 +92,21 @@ connect_check_src(#state{is_connected = false} = State) -> {error, Error, State} end. 
+get_source_events(#state{audit_start_time = LastStartTime, type = push, + push_channel = PChannel} = State, BulkSize) -> + case dperl_dal:read_audit_keys(PChannel, LastStartTime, BulkSize) of + {LastStartTime, LastStartTime, []} -> + if State#state.first_sync == true -> + ?JInfo("Audit rollup is complete"), + {ok, sync_complete, State#state{first_sync = false}}; + true -> {ok, sync_complete, State} + end; + {_StartTime, NextStartTime, []} -> + {ok, [], State#state{audit_start_time = NextStartTime}}; + {_StartTime, NextStartTime, Keys} -> + UniqueKeys = lists:delete(undefined, lists:usort(Keys)), + {ok, UniqueKeys, State#state{audit_start_time = NextStartTime}} + end; get_source_events(#state{contacts = []} = State, _BulkSize) -> {ok, sync_complete, State}; get_source_events(#state{contacts = Contacts} = State, _BulkSize) -> @@ -100,25 +116,72 @@ connect_check_dst(State) -> {ok, State}. do_refresh(_State, _BulkSize) -> {error, cleanup_only}. -fetch_src(Key, #state{cl_contacts = Contacts}) -> +fetch_src(Key, #state{type = push} = State) -> + dperl_dal:read_channel(State#state.push_channel, Key); +fetch_src(Key, #state{cl_contacts = Contacts, type = pull}) -> case lists:keyfind(Key, 1, Contacts) of {Key, Contact} -> Contact; false -> ?NOT_FOUND end. +fetch_dst(Key, #state{type = push, api_url = ApiUrl} = State) -> + Id = Key -- State#state.key_prefix, + ContactUrl = erlang:iolist_to_binary([ApiUrl, Id]), + case exec_req(ContactUrl, State#state.access_token) of + #{<<"id">> := _} = Contact -> Contact; + _ -> ?NOT_FOUND + end; fetch_dst(Key, State) -> dperl_dal:read_channel(State#state.channel, Key). 
+insert_dst(Key, Val, #state{type = push, api_url = ApiUrl} = State) -> + case exec_req(ApiUrl, State#state.access_token, Val, post) of + #{<<"id">> := Id} = Contact -> + NewKey = State#state.key_prefix ++ [binary_to_list(Id)], + ContactBin = imem_json:encode(Contact), + dperl_dal:remove_from_channel(State#state.push_channel, Key), + dperl_dal:write_channel(State#state.channel, NewKey, ContactBin), + dperl_dal:write_channel(State#state.push_channel, NewKey, ContactBin), + {false, State}; + {error, unauthorized} -> + reconnect_exec(State, insert_dst, [Key, Val]); + {error, Error} -> + {error, Error} + end; insert_dst(Key, Val, State) -> update_dst(Key, Val, State). +delete_dst(Key, #state{type = push, api_url = ApiUrl} = State) -> + Id = Key -- State#state.key_prefix, + ContactUrl = erlang:iolist_to_binary([ApiUrl, Id]), + case exec_req(ContactUrl, State#state.access_token, #{}, delete) of + ok -> + dperl_dal:remove_from_channel(State#state.channel, Key), + {false, State}; + {error, unauthorized} -> + reconnect_exec(State, delete_dst, [Key]); + Error -> + Error + end; delete_dst(Key, #state{channel = Channel} = State) -> dperl_dal:remove_from_channel(Channel, Key), dperl_dal:remove_from_channel(State#state.push_channel, Key), {false, State}. 
-update_dst({Key, _}, Val, State) -> - update_dst(Key, Val, State); +update_dst(Key, Val, #state{type = push, api_url = ApiUrl} = State) -> + Id = Key -- State#state.key_prefix, + ContactUrl = erlang:iolist_to_binary([ApiUrl, Id]), + case exec_req(ContactUrl, State#state.access_token, Val, patch) of + #{<<"id">> := _} = Contact -> + ContactBin = imem_json:encode(Contact), + dperl_dal:write_channel(State#state.channel, Key, ContactBin), + dperl_dal:write_channel(State#state.push_channel, Key, ContactBin), + {false, State}; + {error, unauthorized} -> + reconnect_exec(State, update_dst, [Key, Val]); + {error, Error} -> + {error, Error} + end; update_dst(Key, Val, #state{channel = Channel} = State) when is_binary(Val) -> dperl_dal:write_channel(Channel, Key, Val), dperl_dal:write_channel(State#state.push_channel, Key, Val), @@ -143,18 +206,21 @@ load_src_after_key(CurKey, BlkCount, #state{is_cleanup_finished = true, key_pref {ok, Contacts} -> load_src_after_key(CurKey, BlkCount, State#state{cl_contacts = Contacts, is_cleanup_finished = false}); {error, unauthorized} -> - case connect_check_src(State#state{is_connected = false}) of - {ok, State1} -> - load_src_after_key(CurKey, BlkCount, State1); - {error, Error, State1} -> - {error, Error, State1} - end; + reconnect_exec(State, load_src_after_key, [CurKey, BlkCount]); {error, Error} -> {error, Error, State} end; load_src_after_key(CurKey, BlkCount, #state{cl_contacts = Contacts} = State) -> {ok, get_contacts_gt(CurKey, BlkCount, Contacts), State}. +reconnect_exec(State, Fun, Args) -> + case connect_check_src(State#state{is_connected = false}) of + {ok, State1} -> + erlang:apply(?MODULE, Fun, Args ++ [State1]); + {error, Error, State1} -> + {error, Error, State1} + end. + do_cleanup(Deletes, Inserts, Diffs, IsFinished, State) -> NewState = State#state{contacts = Inserts ++ Diffs ++ Deletes}, if IsFinished -> {ok, finish, NewState#state{is_cleanup_finished = true}}; @@ -166,7 +232,7 @@ get_status(#state{}) -> #{}. 
init_state(_) -> #state{}. init({#dperlJob{name=Name, dstArgs = #{channel := Channel, push_channel := PChannel}, - srcArgs = #{api_url := ApiUrl} = SrcArgs}, State}) -> + srcArgs = #{api_url := ApiUrl} = SrcArgs, args = Args}, State}) -> % case dperl_auth_cache:get_enc_hash(Name) of % undefined -> % ?JError("Encryption hash is not avaialable"), @@ -179,11 +245,12 @@ init({#dperlJob{name=Name, dstArgs = #{channel := Channel, push_channel := PChan ChannelBin = dperl_dal:to_binary(Channel), PChannelBin = dperl_dal:to_binary(PChannel), KeyPrefix = maps:get(key_prefix, SrcArgs, []), + Type = maps:get(type, Args, pull), dperl_dal:create_check_channel(ChannelBin), dperl_dal:create_check_channel(PChannelBin), {ok, State#state{channel = ChannelBin, name = Name, api_url = ApiUrl, key_prefix = KeyPrefix, access_token = AccessToken, - push_channel = PChannelBin}}; + push_channel = PChannelBin, type = Type}}; _ -> ?JError("Access token not found"), {stop, badarg} @@ -257,6 +324,29 @@ exec_req(Url, AccessToken) -> {error, Error} end. +exec_req(Url, AccessToken, Body, Method) when is_binary(Url) -> + exec_req(binary_to_list(Url), AccessToken, Body, Method); +exec_req(Url, AccessToken, Body, Method) -> + AuthHeader = [{"Authorization", "Bearer " ++ binary_to_list(AccessToken)}], + % Headers = [AuthHeader, {"Contnet-type", "application/json"}], + case httpc:request(Method, {Url, AuthHeader, "application/json", imem_json:encode(Body)}, [], []) of + {ok, {{_, 201, _}, _, Result}} -> + % create/post result + imem_json:decode(list_to_binary(Result), [return_maps]); + {ok, {{_, 200, _}, _, Result}} -> + % update/patch result + imem_json:decode(list_to_binary(Result), [return_maps]); + {ok,{{_, 204, _}, _, _}} -> + % delete result + ok; + {ok, {{_, 401, _}, _, Error}} -> + ?JError("Unauthorized body : ~s", [Error]), + {error, unauthorized}; + Error -> + {error, Error} + end. 
+ + url_enc_params(Params) -> EParams = maps:fold( fun(K, {enc, V}, Acc) -> From 682c7a430d32a6b8d96a4a390741459df55d54a0 Mon Sep 17 00:00:00 2001 From: shamis Date: Mon, 25 May 2020 11:39:21 +0200 Subject: [PATCH 36/72] removing tag and context --- src/dperl/jobs/dperl_office_365.erl | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/src/dperl/jobs/dperl_office_365.erl b/src/dperl/jobs/dperl_office_365.erl index 22127be7..a0a5c0a8 100644 --- a/src/dperl/jobs/dperl_office_365.erl +++ b/src/dperl/jobs/dperl_office_365.erl @@ -128,7 +128,8 @@ fetch_dst(Key, #state{type = push, api_url = ApiUrl} = State) -> Id = Key -- State#state.key_prefix, ContactUrl = erlang:iolist_to_binary([ApiUrl, Id]), case exec_req(ContactUrl, State#state.access_token) of - #{<<"id">> := _} = Contact -> Contact; + #{<<"id">> := _} = Contact -> + format_contact(Contact); _ -> ?NOT_FOUND end; fetch_dst(Key, State) -> @@ -138,7 +139,7 @@ insert_dst(Key, Val, #state{type = push, api_url = ApiUrl} = State) -> case exec_req(ApiUrl, State#state.access_token, Val, post) of #{<<"id">> := Id} = Contact -> NewKey = State#state.key_prefix ++ [binary_to_list(Id)], - ContactBin = imem_json:encode(Contact), + ContactBin = imem_json:encode(format_contact(Contact)), dperl_dal:remove_from_channel(State#state.push_channel, Key), dperl_dal:write_channel(State#state.channel, NewKey, ContactBin), dperl_dal:write_channel(State#state.push_channel, NewKey, ContactBin), @@ -173,7 +174,7 @@ update_dst(Key, Val, #state{type = push, api_url = ApiUrl} = State) -> ContactUrl = erlang:iolist_to_binary([ApiUrl, Id]), case exec_req(ContactUrl, State#state.access_token, Val, patch) of #{<<"id">> := _} = Contact -> - ContactBin = imem_json:encode(Contact), + ContactBin = imem_json:encode(format_contact(Contact)), dperl_dal:write_channel(State#state.channel, Key, ContactBin), dperl_dal:write_channel(State#state.push_channel, Key, ContactBin), {false, State}; @@ -281,7 +282,10 @@ format_contacts([], _) 
-> []; format_contacts([#{<<"id">> := IdBin} = Contact | Contacts], KeyPrefix) -> Id = binary_to_list(IdBin), Key = KeyPrefix ++ [Id], - [{Key, Contact} | format_contacts(Contacts, KeyPrefix)]. + [{Key, format_contact(Contact)} | format_contacts(Contacts, KeyPrefix)]. + +format_contact(Contact) -> + maps:without([<<"@odata.etag">>, <<"@odata.context">>], Contact). fetch_all_contacts(Url, AccessToken, KeyPrefix) -> fetch_all_contacts(Url, AccessToken, KeyPrefix, []). From 4894bead044e8c01ac60d0e34cac7a297dd6713b Mon Sep 17 00:00:00 2001 From: shamis Date: Tue, 26 May 2020 10:56:09 +0200 Subject: [PATCH 37/72] oura ring granting access from the front end --- priv/dev/static/index.html | 1 + priv/dev/static/index.js | 6 +- priv/dev/static/scripts/login.js | 31 ++-- src/dderl_session.erl | 17 +++ src/dperl/jobs/dperl_office_365.erl | 4 +- src/dperl/jobs/dperl_ouraring_crawl.erl | 195 ++++++++++++------------ 6 files changed, 141 insertions(+), 113 deletions(-) diff --git a/priv/dev/static/index.html b/priv/dev/static/index.html index 84e6d582..bdc5e9e4 100644 --- a/priv/dev/static/index.html +++ b/priv/dev/static/index.html @@ -24,6 +24,7 @@
  • Disconnect
  • Restart Application
  • Register Office 365
  • +
  • Register Oura Ring
  • New Connection
  • diff --git a/priv/dev/static/index.js b/priv/dev/static/index.js index 909bb28b..16949827 100644 --- a/priv/dev/static/index.js +++ b/priv/dev/static/index.js @@ -8,7 +8,8 @@ import {loginAjax} from "./scripts/login"; import {alert_jq} from './dialogs/dialogs'; import {dderlState, show_qry_files, change_password, show_about_dlg} from "./scripts/dderl"; -import {new_connection_tab, logout, restart, authorize_office} from "./scripts/login"; +import {new_connection_tab, logout, restart, + authorize_office, authorize_oura} from "./scripts/login"; import {disconnect_tab, close_tab} from "./scripts/connect"; import {newSqlEditor} from "./scripts/dderl.sql"; import {patch_jquery_ui} from "./jquery-ui-helper/helper.js"; @@ -93,4 +94,5 @@ addClick('btn-restart', () => { restart(); }); addClick('connect-button', () => { new_connection_tab(); }); addClick('newsql-button', () => { newSqlEditor(); }); addClick('btn-logout', () => { logout(); }); -addClick('btn-office-365', () => { authorize_office(); }); +addClick('btn-office-365', () => { authorize_office(); }); +addClick('btn-oura-ring', () => { authorize_oura(); }); diff --git a/priv/dev/static/scripts/login.js b/priv/dev/static/scripts/login.js index 85b8c898..19e30bc5 100644 --- a/priv/dev/static/scripts/login.js +++ b/priv/dev/static/scripts/login.js @@ -26,16 +26,22 @@ window.loginCb = loginCb; // office 365 callback handler const url = new URL(window.location.href); const code = url.searchParams.get('code'); -const state = url.searchParams.get('state'); -if(code) { - dderlState.xsrfToken = state; - const body = { 'office_365_code': { 'code': code, 'state': state } }; - ajaxCall(null, 'office_365_code', body, 'office_365_code', function () { window.close(); }, - function () { alert('error!!!!'); }); -} else { - const error = url.searchParams.get('error'); +const error = url.searchParams.get('error'); +const state = JSON.parse(url.searchParams.get('state')); +if (code) { + dderlState.xsrfToken = state.xsrfToken; + if 
(state.type == 'ouraRing') { + const body = { 'oura_ring_code': { 'code': code, 'state': state } }; + ajaxCall(null, 'oura_ring_code', body, 'oura_ring_code', function () { window.close(); }, + function () { alert('error!!!!'); }); + } else { + const body = { 'office_365_code': { 'code': code, 'state': state } }; + ajaxCall(null, 'office_365_code', body, 'office_365_code', function () { window.close(); }, + function () { alert('error!!!!'); }); + } +} else if (error) { const errorDesc = url.searchParams.get('error_description'); - alert_jq('Login error : ' + error + ' : ' + errorDesc); + alert('Login error : ' + error + ' : ' + errorDesc); window.close(); } // office 365 callback handler end @@ -424,3 +430,10 @@ export function authorize_office() { window.open(auth_config.url, 'Office 365 login', params); }); } + +export function authorize_oura() { + ajaxCall(null, 'oura_ring_auth_config', {}, 'oura_ring_auth_config', function(auth_config) { + const params = 'scrollbars=no,resizable=no,status=no,location=no,toolbar=no,menubar=no,width=500,height=600,left=100,top=100'; + window.open(auth_config.url, 'Oura Ring login', params); + }); +} diff --git a/src/dderl_session.erl b/src/dderl_session.erl index d4ec6747..8c23c65f 100644 --- a/src/dderl_session.erl +++ b/src/dderl_session.erl @@ -397,6 +397,23 @@ process_call({[<<"office_365_code">>], ReqData}, _Adapter, From, {SrcIp, _}, Sta end, State; +process_call({[<<"oura_ring_auth_config">>], _ReqData}, _Adapter, From, {SrcIp, _}, State) -> + act_log(From, ?CMD_NOARGS, #{src => SrcIp, cmd => "about"}, State), + Url = dperl_ouraring_crawl:get_authorize_url(State#state.xsrf_token), + reply(From, #{<<"oura_ring_auth_config">> => #{<<"url">> => Url}}, self()), + State; + +process_call({[<<"oura_ring_code">>], ReqData}, _Adapter, From, {SrcIp, _}, State) -> + act_log(From, ?CMD_NOARGS, #{src => SrcIp, cmd => "format_json_to_save", args => ReqData}, State), + #{<<"oura_ring_code">> := #{<<"code">> := Code}} = 
jsx:decode(ReqData, [return_maps]), + case dperl_ouraring_crawl:get_access_token(Code) of + ok -> + reply(From, #{<<"oura_ring_code">> => #{<<"status">> => <<"ok">>}}, self()); + {error, _Error} -> + reply(From, #{<<"oura_ring_code">> => #{<<"error">> => <<"Fetching token failed, Try again">>}}, self()) + end, + State; + process_call({[<<"connect_info">>], _ReqData}, _Adapter, From, {SrcIp,_}, #state{sess=Sess, user_id=UserId, user = User} = State) -> act_log(From, ?CMD_NOARGS, #{src => SrcIp, cmd => "connect_info"}, State), diff --git a/src/dperl/jobs/dperl_office_365.erl b/src/dperl/jobs/dperl_office_365.erl index d62efe43..d7a25255 100644 --- a/src/dperl/jobs/dperl_office_365.erl +++ b/src/dperl/jobs/dperl_office_365.erl @@ -44,11 +44,11 @@ set_token_info(TokenInfo) when is_binary(TokenInfo) -> dperl_dal:write_channel(<<"avatar">>, ["office365","token"], TokenInfo). get_authorize_url(XSRFToken) -> - URLState = http_uri:encode(XSRFToken), + State = #{xsrf_token => XSRFToken, type => <<"office365">>}, #{auth_url := Url, client_id := ClientId, redirect_uri := RedirectURI, scope := Scope} = get_office_365_auth_config(), UrlParams = url_enc_params(#{"client_id" => ClientId, "redirect_uri" => {enc, RedirectURI}, - "scope" => {enc, Scope}, "state" => URLState}), + "scope" => {enc, Scope}, "state" => {enc, imem_json:encode(State)}}), erlang:iolist_to_binary([Url, "&", UrlParams]). get_access_token(Code) -> diff --git a/src/dperl/jobs/dperl_ouraring_crawl.erl b/src/dperl/jobs/dperl_ouraring_crawl.erl index 723b2445..01e6763a 100644 --- a/src/dperl/jobs/dperl_ouraring_crawl.erl +++ b/src/dperl/jobs/dperl_ouraring_crawl.erl @@ -10,94 +10,89 @@ "Days to be shifted backwards for starting the job") ). 
+-define(OURA_RING_AUTH_CONFIG, + ?GET_CONFIG(ouraRingAuthConfig,[], + #{auth_url =>"https://cloud.ouraring.com/oauth/authorize?response_type=code", + client_id => "12345", redirect_uri => "https://localhost:8443/dderl/", + client_secret => "12345", grant_type => "authorization_code", + token_url => "https://cloud.ouraring.com/oauth/token", + scope => "email personal daily"}, + "Oura Ring auth config")). + % dperl_worker exports -export([init/1, terminate/2, handle_call/3, handle_cast/2, handle_info/2, get_status/1, init_state/1]). --record(state, {name, channel, client_id, client_secret, password, email, - cb_uri, is_connected = false, access_token, api_url, oauth_url, +-export([get_authorize_url/1, get_access_token/1]). + +-record(state, {name, channel, is_connected = true, access_token, api_url, last_sleep_day, last_activity_day, last_readiness_day, - infos = [], auth_time, auth_expiry, key_prefix}). + infos = [], key_prefix}). % dperl_strategy_scr export -export([connect_check_src/1, get_source_events/2, connect_check_dst/1, - do_cleanup/2, do_refresh/2, - fetch_src/2, fetch_dst/2, delete_dst/2, insert_dst/3, - update_dst/3, report_status/3]). - -connect_check_src(#state{is_connected = true, auth_expiry = ExpiresIn, auth_time = AuthTime} = State) -> - case imem_datatype:sec_diff(AuthTime) of - Diff when Diff >= (ExpiresIn - 100) -> - % access token will expire in 100 seconds or less - connect_check_src(State#state{is_connected = false}); - _ -> - {ok, State} - end; -connect_check_src(#state{is_connected = false, client_id = ClientId, cb_uri = CallbackUri, - client_secret = ClientSecret, password = Password, - email = Email, oauth_url = OauthUrl} = State) -> - ?Info("Generating new access token"), - httpc:reset_cookies(?MODULE), - Params = #{ - "response_type" => "code", - "client_id" => ClientId, - "redirect_uri" => edoc_lib:escape_uri(CallbackUri), - "scope" => "email+personal+daily", - "state" => "test" - }, - Url = OauthUrl ++ "/oauth/authorize?" 
++ binary_to_list(url_enc_params(Params)), - try - {ok, {{"HTTP/1.1",302,"Found"}, RespHeader302, []}} = httpc:request( - get, {Url, []}, [{autoredirect, false}], [], ?MODULE), - RedirectUri = OauthUrl ++ proplists:get_value("location", RespHeader302), - {ok, {{"HTTP/1.1",200,"OK"}, RespHeader, _Body}} = httpc:request(get, {RedirectUri, []}, [{autoredirect, false}], [], ?MODULE), - SetCookieHeader = proplists:get_value("set-cookie", RespHeader), - {match, [XRefCookie]} = re:run(SetCookieHeader, ".*_xsrf=(.*);.*", [{capture, [1], list}]), - Params2 = #{ - "_xsrf" => edoc_lib:escape_uri(XRefCookie), - "email" => edoc_lib:escape_uri(Email), - "password" => edoc_lib:escape_uri(Password) - }, - {ok,{{"HTTP/1.1",302,"Found"}, RespHeader302_1, []}} = httpc:request( - post, { - RedirectUri, [], "application/x-www-form-urlencoded", - url_enc_params(Params2)}, [{autoredirect, false}], [], ?MODULE), - RedirectUri_1 = OauthUrl ++ proplists:get_value("location", RespHeader302_1), - {ok, {{"HTTP/1.1",200,"OK"}, _, _}} = httpc:request(get, {RedirectUri_1, []}, [{autoredirect, false}], [], ?MODULE), - Params3 = #{ - "_xsrf" => edoc_lib:escape_uri(XRefCookie), - "cope_email" => "on", - "scope_personal" => "on", - "scope_daily" => "on", - "allow" => "Accept" - }, - {ok, {{"HTTP/1.1",302,"Found"}, RespHeader302_2, []}} = httpc:request( - post, { - RedirectUri_1, [], "application/x-www-form-urlencoded", - url_enc_params(Params3)}, [{autoredirect, false}], [], ?MODULE), - RedirectUri_2 = proplists:get_value("location", RespHeader302_2), - #{query := QueryString} = uri_string:parse(RedirectUri_2), - #{"code" := Code} = maps:from_list(uri_string:dissect_query(QueryString)), - Params4 = #{ - "grant_type" => "authorization_code", - "code" => Code, "client_id" => ClientId, - "redirect_uri" => edoc_lib:escape_uri(CallbackUri), - "client_secret" => ClientSecret - }, - {ok, {{"HTTP/1.1",200,"OK"}, _, BodyJson}} = httpc:request( - post, { - OauthUrl ++ "/oauth/token", [], 
"application/x-www-form-urlencoded", - url_enc_params(Params4)}, [{autoredirect, false}], [], ?MODULE), - #{<<"access_token">> := AccessToken, <<"expires_in">> := ExpiresIn} = Auth = jsx:decode(list_to_binary(BodyJson), [return_maps]), - ?JInfo("Authentication successful : ~p", [Auth]), - {ok, State#state{is_connected = true, access_token = AccessToken, - auth_expiry = ExpiresIn, auth_time = imem_meta:time()}} - catch - Class:Error:Stacktrace -> - ?JError("Unexpected response : ~p:~p:~p", [Class, Error, Stacktrace]), - {error, invalid_return, State} - end; -connect_check_src(State) -> {ok, State}. + do_cleanup/2, do_refresh/2, fetch_src/2, fetch_dst/2, delete_dst/2, + insert_dst/3, update_dst/3, report_status/3]). + +get_oura_ring_auth_config() -> + ?OURA_RING_AUTH_CONFIG. + +get_token_info() -> + dperl_dal:read_channel(<<"avatar">>, ["ouraRing","token"]). + +set_token_info(TokenInfo) when is_map(TokenInfo) -> + set_token_info(imem_json:encode(TokenInfo)); +set_token_info(TokenInfo) when is_list(TokenInfo) -> + set_token_info(list_to_binary(TokenInfo)); +set_token_info(TokenInfo) when is_binary(TokenInfo) -> + dperl_dal:create_check_channel(<<"avatar">>), + dperl_dal:write_channel(<<"avatar">>, ["ouraRing","token"], TokenInfo). + +get_authorize_url(XSRFToken) -> + State = #{xsrfToken => XSRFToken, type => <<"ouraRing">>}, + #{auth_url := Url, client_id := ClientId, redirect_uri := RedirectURI, + scope := Scope} = get_oura_ring_auth_config(), + UrlParams = url_enc_params(#{"client_id" => ClientId, "state" => {enc, imem_json:encode(State)}, + "scope" => {enc, Scope},"redirect_uri" => {enc, RedirectURI}}), + erlang:iolist_to_binary([Url, "&", UrlParams]). 
+ +get_access_token(Code) -> + #{token_url := TUrl, client_id := ClientId, redirect_uri := RedirectURI, + client_secret := Secret, grant_type := GrantType} = get_oura_ring_auth_config(), + Body = url_enc_params(#{"client_id" => ClientId, "code" => Code, "redirect_uri" => {enc, RedirectURI}, + "client_secret" => {enc, Secret}, "grant_type" => GrantType}), + ContentType = "application/x-www-form-urlencoded", + case httpc:request(post, {TUrl, "", ContentType, Body}, [], []) of + {ok, {_, _, TokenInfo}} -> + set_token_info(TokenInfo), + ok; + {error, Error} -> + ?Error("Fetching access token : ~p", [Error]), + {error, Error} + end. + + +connect_check_src(#state{is_connected = true} = State) -> + {ok, State}; +connect_check_src(#state{is_connected = false} = State) -> + ?JTrace("Refreshing access token"), + #{token_url := TUrl, client_id := ClientId, + client_secret := Secret} = get_oura_ring_auth_config(), + #{<<"refresh_token">> := RefreshToken} = get_token_info(), + Body = url_enc_params(#{"client_id" => ClientId, "client_secret" => {enc, Secret}, + "refresh_token" => RefreshToken, "grant_type" => "refresh_token"}), + ContentType = "application/x-www-form-urlencoded", + case httpc:request(post, {TUrl, "", ContentType, Body}, [], []) of + {ok, {{_, 200, "OK"}, _, TokenBody}} -> + TokenInfo = imem_json:decode(list_to_binary(TokenBody), [return_maps]), + set_token_info(TokenBody), + #{<<"access_token">> := AccessToken} = TokenInfo, + ?JInfo("new access token fetched"), + {ok, State#state{access_token = AccessToken, is_connected = true}}; + Error -> + ?JError("Unexpected response : ~p", [Error]), + {error, Error, State} + end. get_source_events(#state{infos = []} = State, _BulkSize) -> {ok, sync_complete, State}; @@ -161,11 +156,8 @@ init_state([#dperlNodeJobDyn{state = State} | _]) -> init_state([_ | Others]) -> init_state(Others). 
-init({#dperlJob{name=Name, dstArgs = #{channel := Channel}, - srcArgs = #{client_id := ClientId, user_password := Password, - client_secret := ClientSecret, user_email := Email, - cb_uri := CallbackUri, api_url := ApiUrl, - oauth_url := OauthUrl} = SrcArgs}, State}) -> +init({#dperlJob{name=Name, dstArgs = #{channel := Channel} = DstArgs, + srcArgs = #{api_url := ApiUrl}}, State}) -> case dperl_auth_cache:get_enc_hash(Name) of undefined -> ?JError("Encryption hash is not avaialable"), @@ -173,16 +165,17 @@ init({#dperlJob{name=Name, dstArgs = #{channel := Channel}, {User, EncHash} -> ?JInfo("Starting with ~p's enchash...", [User]), imem_sec_mnesia:put_enc_hash(EncHash), - ChannelBin = dperl_dal:to_binary(Channel), - KeyPrefix = maps:get(key_prefix, SrcArgs, []), - dperl_dal:create_check_channel(ChannelBin), - inets:start(httpc, [{profile, ?MODULE}]), - ok = httpc:set_options([{cookies, enabled}], ?MODULE), - {ok, State#state{channel = ChannelBin, client_id = ClientId, - client_secret = ClientSecret, password = Password, - email = Email, cb_uri = CallbackUri, name = Name, - api_url = ApiUrl, oauth_url = OauthUrl, - key_prefix = KeyPrefix}} + case get_token_info() of + #{<<"access_token">> := AccessToken} -> + ChannelBin = dperl_dal:to_binary(Channel), + KeyPrefix = maps:get(key_prefix, DstArgs, []), + dperl_dal:create_check_channel(ChannelBin), + {ok, State#state{channel = ChannelBin, api_url = ApiUrl, + key_prefix = KeyPrefix, access_token = AccessToken}}; + _ -> + ?JError("Access token not found"), + {stop, badarg} + end end; init(Args) -> ?JError("bad start parameters ~p", [Args]), @@ -262,7 +255,7 @@ fetch_metric(Type, Day, #state{api_url = ApiUrl, access_token = AccessToken} = S end. 
fetch_metric(Type, DayQuery, ApiUrl, AccessToken) -> - Url = ApiUrl ++ "/v1/" ++ Type ++ DayQuery, + Url = ApiUrl ++ Type ++ DayQuery, TypeBin = list_to_binary(Type), case exec_req(Url, AccessToken) of #{TypeBin := []} -> @@ -274,7 +267,7 @@ fetch_metric(Type, DayQuery, ApiUrl, AccessToken) -> end. fetch_userinfo(#state{api_url = ApiUrl, access_token = AccessToken} = State) -> - case exec_req(ApiUrl ++ "/v1/userinfo", AccessToken) of + case exec_req(ApiUrl ++ "userinfo", AccessToken) of UserInfo when is_map(UserInfo) -> Info = {build_key("userinfo", State#state.key_prefix), UserInfo}, State#state{infos = [Info | State#state.infos]}; @@ -309,7 +302,7 @@ get_day(Type, State) -> exec_req(Url, AccessToken) -> AuthHeader = [{"Authorization", "Bearer " ++ binary_to_list(AccessToken)}], - case httpc:request(get, {Url, AuthHeader}, [{autoredirect, false}], [], ?MODULE) of + case httpc:request(get, {Url, AuthHeader}, [], []) of {ok, {{_, 200, "OK"}, _, Result}} -> imem_json:decode(list_to_binary(Result), [return_maps]); Error -> @@ -344,7 +337,9 @@ build_key(Type, KeyPrefix) when is_list(Type), is_list(KeyPrefix)-> url_enc_params(Params) -> EParams = maps:fold( - fun(K, V, Acc) -> + fun(K, {enc, V}, Acc) -> + ["&", K, "=", http_uri:encode(V) | Acc]; + (K, V, Acc) -> ["&", K, "=", V | Acc] end, [], Params), - erlang:iolist_to_binary(tl(EParams)). + erlang:iolist_to_binary([tl(EParams)]). 
From 94b7cd44819e2c645412d90e22c06b532875b299 Mon Sep 17 00:00:00 2001 From: shamis Date: Tue, 26 May 2020 14:32:16 +0200 Subject: [PATCH 38/72] trigger on dperlJob table --- src/dperl/dperl_auth_cache.erl | 8 +++++++- src/dperl/dperl_dal.erl | 5 ++++- src/dperl/dperl_sup.erl | 3 ++- 3 files changed, 13 insertions(+), 3 deletions(-) diff --git a/src/dperl/dperl_auth_cache.erl b/src/dperl/dperl_auth_cache.erl index efc1e878..cd66751d 100644 --- a/src/dperl/dperl_auth_cache.erl +++ b/src/dperl/dperl_auth_cache.erl @@ -7,7 +7,8 @@ -export([start_link/0, set_enc_hash/3, get_enc_hash/1, - set_enc_hash_locally/3]). + set_enc_hash_locally/3, + auth_cache_trigger_fun/0]). %% gen_server callbacks -export([init/1, @@ -52,3 +53,8 @@ set_enc_hash_locally(JobOrServiceName, User, EncHash) -> get_enc_hash(JobOrServiceName) -> gen_server:call(?MODULE, {getEncHash, JobOrServiceName}). + +auth_cache_trigger_fun() -> + "fun(OldRec,NewRec,Table,User,TrOpts) -> + dperl_auth_cache:set_enc_hash(NewRec, User, proplists:get_value(encHash, TrOpts, undefined)) + end.". diff --git a/src/dperl/dperl_dal.erl b/src/dperl/dperl_dal.erl index ba242cd6..9c892ff7 100644 --- a/src/dperl/dperl_dal.erl +++ b/src/dperl/dperl_dal.erl @@ -18,7 +18,7 @@ read_siblings/2, read_channel_raw/2, worker_error/4, sort_links/1, get_pool_name/1, remote_dal/3, get_pool_name/2, run_oci_stmt/3, activity_logger/3, create_check_index/2, to_atom/1, report_status/7, - key_to_json/1, key_to_json_enc/1]). + key_to_json/1, key_to_json_enc/1, create_or_replace_tigger/2]). check_table(Table, ColumnNames, ColumnTypes, DefaultRecord, Opts) -> case catch imem_meta:create_check_table( @@ -52,6 +52,9 @@ create_check_index(Channel, IndexDefinition) -> Other -> Other end. +create_or_replace_tigger(Table, FunStr) -> + imem_meta:create_or_replace_trigger(Table, FunStr). + -spec write_channel(binary(), any(), any()) -> ok | {error, any()}. 
write_channel(Channel, Key, Val) when is_map(Val); byte_size(Val) > 0 -> case catch imem_dal_skvh:write(system, Channel, Key, Val) of diff --git a/src/dperl/dperl_sup.erl b/src/dperl/dperl_sup.erl index 04f4d758..d766cd5b 100644 --- a/src/dperl/dperl_sup.erl +++ b/src/dperl/dperl_sup.erl @@ -25,7 +25,7 @@ start_link() -> ) end, [ - ?TABLESPEC(dperlJob,[]), + ?TABLESPEC(dperlJob,[{trigger, dperl_auth_cache:auth_cache_trigger_fun()}]), ?TABLESPEC( ?JOBDYN_TABLE, dperlNodeJobDyn, [ @@ -45,6 +45,7 @@ start_link() -> ) ] ), + dperl_dal:create_or_replace_tigger(dperlJob, dperl_auth_cache:auth_cache_trigger_fun()), ok = dderl:add_d3_templates_path( dderl, filename:join(priv_dir(), "dashboard_scripts") ), From c80d8c8d5f89933e69a50f9f07917c50c5d0d0a2 Mon Sep 17 00:00:00 2001 From: shamis Date: Tue, 26 May 2020 16:45:56 +0200 Subject: [PATCH 39/72] oauth callback refactoring wip --- priv/dev/static/scripts/login.js | 46 +++++++++++++++++++----------- src/dderl_session.erl | 49 +++++++++++++++++++++----------- 2 files changed, 62 insertions(+), 33 deletions(-) diff --git a/priv/dev/static/scripts/login.js b/priv/dev/static/scripts/login.js index cf10a947..8980e07a 100644 --- a/priv/dev/static/scripts/login.js +++ b/priv/dev/static/scripts/login.js @@ -30,29 +30,41 @@ const error = url.searchParams.get('error'); const state = JSON.parse(url.searchParams.get('state')); if (code) { dderlState.xsrfToken = state.xsrfToken; - if (state.type == 'ouraRing') { - const body = { 'oura_ring_code': { 'code': code, 'state': state } }; - ajaxCall(null, 'oura_ring_code', body, 'oura_ring_code', + setTimeout(() => { + const body = { 'oauth2_callback': { 'code': code, 'state': state } }; + ajaxCall(null, 'oauth2_callback', body, 'oauth2_callback', function () { - window.close(); + // window.close(); }, function (error) { alert('Error fetching access token : ' + error); console.log('Error fetching access token', error); - window.close(); + // window.close(); }); - } else { - const body = { 
'office_365_code': { 'code': code, 'state': state } }; - ajaxCall(null, 'office_365_code', body, 'office_365_code', - function () { - window.close(); - }, - function (error) { - alert('Error fetching access token : ' + error); - console.log('Error fetching access token', error); - window.close(); - }); - } + }, 2000); + // if (state.type == 'ouraRing') { + // const body = { 'oura_ring_code': { 'code': code, 'state': state } }; + // ajaxCall(null, 'oura_ring_code', body, 'oura_ring_code', + // function () { + // window.close(); + // }, + // function (error) { + // alert('Error fetching access token : ' + error); + // console.log('Error fetching access token', error); + // window.close(); + // }); + // } else { + // const body = { 'office_365_code': { 'code': code, 'state': state } }; + // ajaxCall(null, 'office_365_code', body, 'office_365_code', + // function () { + // window.close(); + // }, + // function (error) { + // alert('Error fetching access token : ' + error); + // console.log('Error fetching access token', error); + // window.close(); + // }); + // } } else if (error) { const errorDesc = url.searchParams.get('error_description'); alert('Login error : ' + error + ' : ' + errorDesc); diff --git a/src/dderl_session.erl b/src/dderl_session.erl index 8c23c65f..91b8e3a7 100644 --- a/src/dderl_session.erl +++ b/src/dderl_session.erl @@ -386,16 +386,16 @@ process_call({[<<"office_365_auth_config">>], _ReqData}, _Adapter, From, {SrcIp, reply(From, #{<<"office_365_auth_config">> => #{<<"url">> => Url}}, self()), State; -process_call({[<<"office_365_code">>], ReqData}, _Adapter, From, {SrcIp, _}, State) -> - act_log(From, ?CMD_NOARGS, #{src => SrcIp, cmd => "format_json_to_save", args => ReqData}, State), - #{<<"office_365_code">> := #{<<"code">> := Code}} = jsx:decode(ReqData, [return_maps]), - case dperl_office_365:get_access_token(Code) of - ok -> - reply(From, #{<<"office_365_code">> => #{<<"status">> => <<"ok">>}}, self()); - {error, _Error} -> - reply(From, 
#{<<"office_365_code">> => #{<<"error">> => <<"Fetching token failed, Try again">>}}, self()) - end, - State; +% process_call({[<<"office_365_code">>], ReqData}, _Adapter, From, {SrcIp, _}, State) -> +% act_log(From, ?CMD_NOARGS, #{src => SrcIp, cmd => "format_json_to_save", args => ReqData}, State), +% #{<<"office_365_code">> := #{<<"code">> := Code}} = jsx:decode(ReqData, [return_maps]), +% case dperl_office_365:get_access_token(Code) of +% ok -> +% reply(From, #{<<"office_365_code">> => #{<<"status">> => <<"ok">>}}, self()); +% {error, _Error} -> +% reply(From, #{<<"office_365_code">> => #{<<"error">> => <<"Fetching token failed, Try again">>}}, self()) +% end, +% State; process_call({[<<"oura_ring_auth_config">>], _ReqData}, _Adapter, From, {SrcIp, _}, State) -> act_log(From, ?CMD_NOARGS, #{src => SrcIp, cmd => "about"}, State), @@ -403,14 +403,28 @@ process_call({[<<"oura_ring_auth_config">>], _ReqData}, _Adapter, From, {SrcIp, reply(From, #{<<"oura_ring_auth_config">> => #{<<"url">> => Url}}, self()), State; -process_call({[<<"oura_ring_code">>], ReqData}, _Adapter, From, {SrcIp, _}, State) -> +% process_call({[<<"oura_ring_code">>], ReqData}, _Adapter, From, {SrcIp, _}, State) -> +% act_log(From, ?CMD_NOARGS, #{src => SrcIp, cmd => "format_json_to_save", args => ReqData}, State), +% #{<<"oura_ring_code">> := #{<<"code">> := Code}} = jsx:decode(ReqData, [return_maps]), +% case dperl_ouraring_crawl:get_access_token(Code) of +% ok -> +% reply(From, #{<<"oura_ring_code">> => #{<<"status">> => <<"ok">>}}, self()); +% {error, _Error} -> +% reply(From, #{<<"oura_ring_code">> => #{<<"error">> => <<"Fetching token failed, Try again">>}}, self()) +% end, +% State; + +process_call({[<<"oauth2_callback">>], ReqData}, _Adapter, From, {SrcIp, _}, State) -> act_log(From, ?CMD_NOARGS, #{src => SrcIp, cmd => "format_json_to_save", args => ReqData}, State), - #{<<"oura_ring_code">> := #{<<"code">> := Code}} = jsx:decode(ReqData, [return_maps]), - case 
dperl_ouraring_crawl:get_access_token(Code) of + #{<<"oauth2_callback">> := + #{<<"code">> := Code, <<"state">> := #{<<"type">> := Type}}} = jsx:decode(ReqData, [return_maps]), + Module = oauth2_callback_module(Type), + case Module:get_access_token(Code) of ok -> - reply(From, #{<<"oura_ring_code">> => #{<<"status">> => <<"ok">>}}, self()); - {error, _Error} -> - reply(From, #{<<"oura_ring_code">> => #{<<"error">> => <<"Fetching token failed, Try again">>}}, self()) + reply(From, #{<<"oauth2_callback">> => #{<<"status">> => <<"ok">>}}, self()); + {error, Error} -> + ?Error("Fetching token : ~p:get_access_token(~p) : ~p", [Module, Code, Error]), + reply(From, #{<<"oauth2_callback">> => #{<<"error">> => <<"Fetching token failed, Try again">>}}, self()) end, State; @@ -896,6 +910,9 @@ cancel_timer(TRef) -> erlang:cancel_timer(TRef), ok. +oauth2_callback_module(<<"office365">>) -> dperl_office_365; +oauth2_callback_module(<<"ouraRing">>) -> dperl_office_365. + act_log(ReplyPid, LogLevel, Args, State) -> ReplyPid ! 
{access, Args#{userId => case Args of From 77b33f9cf45c1e7a4815428ff80f78999f6726c5 Mon Sep 17 00:00:00 2001 From: shamis Date: Tue, 26 May 2020 17:33:01 +0200 Subject: [PATCH 40/72] removed commented code, refactoring --- priv/dev/static/scripts/login.js | 49 ++++++------------------- src/dderl_session.erl | 24 +----------- src/dperl/dperl_dal.erl | 12 +++++- src/dperl/jobs/dperl_office_365.erl | 36 ++++++++---------- src/dperl/jobs/dperl_ouraring_crawl.erl | 23 +++++------- 5 files changed, 49 insertions(+), 95 deletions(-) diff --git a/priv/dev/static/scripts/login.js b/priv/dev/static/scripts/login.js index 8980e07a..ad0cbd6a 100644 --- a/priv/dev/static/scripts/login.js +++ b/priv/dev/static/scripts/login.js @@ -23,54 +23,29 @@ export function loginAjax(data = {}) { window.loginCb = loginCb; -// office 365 callback handler +// oauth2 callback handler const url = new URL(window.location.href); const code = url.searchParams.get('code'); const error = url.searchParams.get('error'); const state = JSON.parse(url.searchParams.get('state')); if (code) { dderlState.xsrfToken = state.xsrfToken; - setTimeout(() => { - const body = { 'oauth2_callback': { 'code': code, 'state': state } }; - ajaxCall(null, 'oauth2_callback', body, 'oauth2_callback', - function () { - // window.close(); - }, - function (error) { - alert('Error fetching access token : ' + error); - console.log('Error fetching access token', error); - // window.close(); - }); - }, 2000); - // if (state.type == 'ouraRing') { - // const body = { 'oura_ring_code': { 'code': code, 'state': state } }; - // ajaxCall(null, 'oura_ring_code', body, 'oura_ring_code', - // function () { - // window.close(); - // }, - // function (error) { - // alert('Error fetching access token : ' + error); - // console.log('Error fetching access token', error); - // window.close(); - // }); - // } else { - // const body = { 'office_365_code': { 'code': code, 'state': state } }; - // ajaxCall(null, 'office_365_code', body, 
'office_365_code', - // function () { - // window.close(); - // }, - // function (error) { - // alert('Error fetching access token : ' + error); - // console.log('Error fetching access token', error); - // window.close(); - // }); - // } + const body = { 'oauth2_callback': { 'code': code, 'state': state } }; + ajaxCall(null, 'oauth2_callback', body, 'oauth2_callback', + function () { + window.close(); + }, + function (error) { + alert('Error fetching access token : ' + error); + console.log('Error fetching access token', error); + window.close(); + }); } else if (error) { const errorDesc = url.searchParams.get('error_description'); alert('Login error : ' + error + ' : ' + errorDesc); window.close(); } -// office 365 callback handler end +// oauth2 callback handler end function loginCb(resp) { try { diff --git a/src/dderl_session.erl b/src/dderl_session.erl index 91b8e3a7..754c0964 100644 --- a/src/dderl_session.erl +++ b/src/dderl_session.erl @@ -386,34 +386,12 @@ process_call({[<<"office_365_auth_config">>], _ReqData}, _Adapter, From, {SrcIp, reply(From, #{<<"office_365_auth_config">> => #{<<"url">> => Url}}, self()), State; -% process_call({[<<"office_365_code">>], ReqData}, _Adapter, From, {SrcIp, _}, State) -> -% act_log(From, ?CMD_NOARGS, #{src => SrcIp, cmd => "format_json_to_save", args => ReqData}, State), -% #{<<"office_365_code">> := #{<<"code">> := Code}} = jsx:decode(ReqData, [return_maps]), -% case dperl_office_365:get_access_token(Code) of -% ok -> -% reply(From, #{<<"office_365_code">> => #{<<"status">> => <<"ok">>}}, self()); -% {error, _Error} -> -% reply(From, #{<<"office_365_code">> => #{<<"error">> => <<"Fetching token failed, Try again">>}}, self()) -% end, -% State; - process_call({[<<"oura_ring_auth_config">>], _ReqData}, _Adapter, From, {SrcIp, _}, State) -> act_log(From, ?CMD_NOARGS, #{src => SrcIp, cmd => "about"}, State), Url = dperl_ouraring_crawl:get_authorize_url(State#state.xsrf_token), reply(From, #{<<"oura_ring_auth_config">> => 
#{<<"url">> => Url}}, self()), State; -% process_call({[<<"oura_ring_code">>], ReqData}, _Adapter, From, {SrcIp, _}, State) -> -% act_log(From, ?CMD_NOARGS, #{src => SrcIp, cmd => "format_json_to_save", args => ReqData}, State), -% #{<<"oura_ring_code">> := #{<<"code">> := Code}} = jsx:decode(ReqData, [return_maps]), -% case dperl_ouraring_crawl:get_access_token(Code) of -% ok -> -% reply(From, #{<<"oura_ring_code">> => #{<<"status">> => <<"ok">>}}, self()); -% {error, _Error} -> -% reply(From, #{<<"oura_ring_code">> => #{<<"error">> => <<"Fetching token failed, Try again">>}}, self()) -% end, -% State; - process_call({[<<"oauth2_callback">>], ReqData}, _Adapter, From, {SrcIp, _}, State) -> act_log(From, ?CMD_NOARGS, #{src => SrcIp, cmd => "format_json_to_save", args => ReqData}, State), #{<<"oauth2_callback">> := @@ -911,7 +889,7 @@ cancel_timer(TRef) -> ok. oauth2_callback_module(<<"office365">>) -> dperl_office_365; -oauth2_callback_module(<<"ouraRing">>) -> dperl_office_365. +oauth2_callback_module(<<"ouraRing">>) -> dperl_ouraring_crawl. act_log(ReplyPid, LogLevel, Args, State) -> ReplyPid ! {access, diff --git a/src/dperl/dperl_dal.erl b/src/dperl/dperl_dal.erl index 9c892ff7..a80c84ab 100644 --- a/src/dperl/dperl_dal.erl +++ b/src/dperl/dperl_dal.erl @@ -3,7 +3,7 @@ -include("dperl.hrl"). -export([select/2, subscribe/1, unsubscribe/1, write/2, check_table/5, - sql_jp_bind/1, sql_bind_jp_values/2, read_channel/2, + sql_jp_bind/1, sql_bind_jp_values/2, read_channel/2, url_enc_params/1, io_to_oci_datetime/1, create_check_channel/1, write_channel/3, read_check_write/4, read_audit_keys/3, read_audit/3, get_enabled/1, update_job_dyn/2, update_job_dyn/3, job_error_close/1, to_binary/1, @@ -667,6 +667,16 @@ activity_logger(StatusCtx, Name, Extra) -> ?JDebug("activity logger already running ~p", [Pid]) end. +-spec url_enc_params(map()) -> binary(). 
+url_enc_params(Params) -> + EParams = maps:fold( + fun(K, {enc, V}, Acc) -> + ["&", K, "=", http_uri:encode(V) | Acc]; + (K, V, Acc) -> + ["&", K, "=", V | Acc] + end, [], Params), + erlang:iolist_to_binary([tl(EParams)]). + %%------------------------------------------------------------------------------ %% private %%------------------------------------------------------------------------------ diff --git a/src/dperl/jobs/dperl_office_365.erl b/src/dperl/jobs/dperl_office_365.erl index f3a95826..ad17c03e 100644 --- a/src/dperl/jobs/dperl_office_365.erl +++ b/src/dperl/jobs/dperl_office_365.erl @@ -19,6 +19,9 @@ -export([get_authorize_url/1, get_access_token/1]). +% contacts graph api +% https://docs.microsoft.com/en-us/graph/api/resources/contact?view=graph-rest-1.0 + -record(state, {name, channel, is_connected = true, access_token, api_url, contacts = [], key_prefix, fetch_url, cl_contacts = [], is_cleanup_finished = true, push_channel, type = pull, @@ -45,20 +48,22 @@ set_token_info(TokenInfo) when is_binary(TokenInfo) -> dperl_dal:write_channel(<<"avatar">>, ["office365","token"], TokenInfo). get_authorize_url(XSRFToken) -> - State = #{xsrf_token => XSRFToken, type => <<"office365">>}, + State = #{xsrfToken => XSRFToken, type => <<"office365">>}, #{auth_url := Url, client_id := ClientId, redirect_uri := RedirectURI, scope := Scope} = get_office_365_auth_config(), - UrlParams = url_enc_params(#{"client_id" => ClientId, "redirect_uri" => {enc, RedirectURI}, - "scope" => {enc, Scope}, "state" => {enc, imem_json:encode(State)}}), + UrlParams = dperl_dal:url_enc_params( + #{"client_id" => ClientId, "redirect_uri" => {enc, RedirectURI}, + "scope" => {enc, Scope}, "state" => {enc, imem_json:encode(State)}}), erlang:iolist_to_binary([Url, "&", UrlParams]). 
get_access_token(Code) -> #{token_url := TUrl, client_id := ClientId, redirect_uri := RedirectURI, client_secret := Secret, grant_type := GrantType, scope := Scope} = get_office_365_auth_config(), - Body = url_enc_params(#{"client_id" => ClientId, "scope" => {enc, Scope}, "code" => Code, - "redirect_uri" => {enc, RedirectURI}, "grant_type" => GrantType, - "client_secret" => {enc, Secret}}), + Body = dperl_dal:url_enc_params( + #{"client_id" => ClientId, "scope" => {enc, Scope}, "code" => Code, + "redirect_uri" => {enc, RedirectURI}, "grant_type" => GrantType, + "client_secret" => {enc, Secret}}), ContentType = "application/x-www-form-urlencoded", case httpc:request(post, {TUrl, "", ContentType, Body}, [], []) of {ok, {_, _, TokenInfo}} -> @@ -76,9 +81,10 @@ connect_check_src(#state{is_connected = false} = State) -> #{token_url := TUrl, client_id := ClientId, client_secret := Secret, scope := Scope} = get_office_365_auth_config(), #{<<"refresh_token">> := RefreshToken} = get_token_info(), - Body = url_enc_params(#{"client_id" => ClientId, "scope" => {enc, Scope}, - "refresh_token" => RefreshToken, "grant_type" => "refresh_token", - "client_secret" => {enc, Secret}}), + Body = dperl_dal:url_enc_params( + #{"client_id" => ClientId, "scope" => {enc, Scope}, + "refresh_token" => RefreshToken, "grant_type" => "refresh_token", + "client_secret" => {enc, Secret}}), ContentType = "application/x-www-form-urlencoded", case httpc:request(post, {TUrl, "", ContentType, Body}, [], []) of {ok, {{_, 200, "OK"}, _, TokenBody}} -> @@ -197,7 +203,7 @@ load_dst_after_key(CurKey, BlkCount, #state{channel = Channel}) -> load_src_after_key(CurKey, BlkCount, #state{fetch_url = undefined} = State) -> % https://graph.microsoft.com/v1.0/me/contacts/?$top=100&$select=displayName&orderby=displayName - UrlParams = url_enc_params(#{"$top" => integer_to_list(BlkCount)}), + UrlParams = dperl_dal:url_enc_params(#{"$top" => integer_to_list(BlkCount)}), ContactsUrl = 
erlang:iolist_to_binary([State#state.api_url, "?", UrlParams]), load_src_after_key(CurKey, BlkCount, State#state{fetch_url = ContactsUrl}); load_src_after_key(CurKey, BlkCount, #state{is_cleanup_finished = true, key_prefix = KeyPrefix, @@ -349,13 +355,3 @@ exec_req(Url, AccessToken, Body, Method) -> Error -> {error, Error} end. - - -url_enc_params(Params) -> - EParams = maps:fold( - fun(K, {enc, V}, Acc) -> - ["&", K, "=", http_uri:encode(V) | Acc]; - (K, V, Acc) -> - ["&", K, "=", V | Acc] - end, [], Params), - erlang:iolist_to_binary([tl(EParams)]). diff --git a/src/dperl/jobs/dperl_ouraring_crawl.erl b/src/dperl/jobs/dperl_ouraring_crawl.erl index 01e6763a..eeabe89a 100644 --- a/src/dperl/jobs/dperl_ouraring_crawl.erl +++ b/src/dperl/jobs/dperl_ouraring_crawl.erl @@ -52,15 +52,17 @@ get_authorize_url(XSRFToken) -> State = #{xsrfToken => XSRFToken, type => <<"ouraRing">>}, #{auth_url := Url, client_id := ClientId, redirect_uri := RedirectURI, scope := Scope} = get_oura_ring_auth_config(), - UrlParams = url_enc_params(#{"client_id" => ClientId, "state" => {enc, imem_json:encode(State)}, - "scope" => {enc, Scope},"redirect_uri" => {enc, RedirectURI}}), + UrlParams = dperl_dal:url_enc_params( + #{"client_id" => ClientId, "state" => {enc, imem_json:encode(State)}, + "scope" => {enc, Scope},"redirect_uri" => {enc, RedirectURI}}), erlang:iolist_to_binary([Url, "&", UrlParams]). 
get_access_token(Code) -> #{token_url := TUrl, client_id := ClientId, redirect_uri := RedirectURI, client_secret := Secret, grant_type := GrantType} = get_oura_ring_auth_config(), - Body = url_enc_params(#{"client_id" => ClientId, "code" => Code, "redirect_uri" => {enc, RedirectURI}, - "client_secret" => {enc, Secret}, "grant_type" => GrantType}), + Body = dperl_dal:url_enc_params( + #{"client_id" => ClientId, "code" => Code, "redirect_uri" => {enc, RedirectURI}, + "client_secret" => {enc, Secret}, "grant_type" => GrantType}), ContentType = "application/x-www-form-urlencoded", case httpc:request(post, {TUrl, "", ContentType, Body}, [], []) of {ok, {_, _, TokenInfo}} -> @@ -79,8 +81,9 @@ connect_check_src(#state{is_connected = false} = State) -> #{token_url := TUrl, client_id := ClientId, client_secret := Secret} = get_oura_ring_auth_config(), #{<<"refresh_token">> := RefreshToken} = get_token_info(), - Body = url_enc_params(#{"client_id" => ClientId, "client_secret" => {enc, Secret}, - "refresh_token" => RefreshToken, "grant_type" => "refresh_token"}), + Body = dperl_dal:url_enc_params( + #{"client_id" => ClientId, "client_secret" => {enc, Secret}, + "refresh_token" => RefreshToken, "grant_type" => "refresh_token"}), ContentType = "application/x-www-form-urlencoded", case httpc:request(post, {TUrl, "", ContentType, Body}, [], []) of {ok, {{_, 200, "OK"}, _, TokenBody}} -> @@ -335,11 +338,3 @@ set_metric_day("readiness", Day, State) -> State#state{last_readiness_day = Day} build_key(Type, KeyPrefix) when is_list(Type), is_list(KeyPrefix)-> KeyPrefix ++ [Type]. -url_enc_params(Params) -> - EParams = maps:fold( - fun(K, {enc, V}, Acc) -> - ["&", K, "=", http_uri:encode(V) | Acc]; - (K, V, Acc) -> - ["&", K, "=", V | Acc] - end, [], Params), - erlang:iolist_to_binary([tl(EParams)]). 
From 231148d214d40be42d5852d5f811ce018addd5ab Mon Sep 17 00:00:00 2001 From: shamis Date: Tue, 26 May 2020 17:37:04 +0200 Subject: [PATCH 41/72] encHAsh code --- src/dperl/jobs/dperl_office_365.erl | 47 +++++++++++++++-------------- 1 file changed, 24 insertions(+), 23 deletions(-) diff --git a/src/dperl/jobs/dperl_office_365.erl b/src/dperl/jobs/dperl_office_365.erl index ad17c03e..5541d5a0 100644 --- a/src/dperl/jobs/dperl_office_365.erl +++ b/src/dperl/jobs/dperl_office_365.erl @@ -238,29 +238,30 @@ get_status(#state{}) -> #{}. init_state(_) -> #state{}. -init({#dperlJob{name=Name, dstArgs = #{channel := Channel, push_channel := PChannel}, - srcArgs = #{api_url := ApiUrl} = SrcArgs, args = Args}, State}) -> - % case dperl_auth_cache:get_enc_hash(Name) of - % undefined -> - % ?JError("Encryption hash is not avaialable"), - % {stop, badarg}; - % {User, EncHash} -> - % ?JInfo("Starting with ~p's enchash...", [User]), - % imem_sec_mnesia:put_enc_hash(EncHash), - case get_token_info() of - #{<<"access_token">> := AccessToken} -> - ChannelBin = dperl_dal:to_binary(Channel), - PChannelBin = dperl_dal:to_binary(PChannel), - KeyPrefix = maps:get(key_prefix, SrcArgs, []), - Type = maps:get(type, Args, pull), - dperl_dal:create_check_channel(ChannelBin), - dperl_dal:create_check_channel(PChannelBin), - {ok, State#state{channel = ChannelBin, name = Name, api_url = ApiUrl, - key_prefix = KeyPrefix, access_token = AccessToken, - push_channel = PChannelBin, type = Type}}; - _ -> - ?JError("Access token not found"), - {stop, badarg} +init({#dperlJob{name=Name, srcArgs = #{api_url := ApiUrl}, args = Args, + dstArgs = #{channel := Channel, push_channel := PChannel} = DstArgs}, State}) -> + case dperl_auth_cache:get_enc_hash(Name) of + undefined -> + ?JError("Encryption hash is not avaialable"), + {stop, badarg}; + {User, EncHash} -> + ?JInfo("Starting with ~p's enchash...", [User]), + imem_sec_mnesia:put_enc_hash(EncHash), + case get_token_info() of + #{<<"access_token">> := 
AccessToken} -> + ChannelBin = dperl_dal:to_binary(Channel), + PChannelBin = dperl_dal:to_binary(PChannel), + KeyPrefix = maps:get(key_prefix, DstArgs, []), + Type = maps:get(type, Args, pull), + dperl_dal:create_check_channel(ChannelBin), + dperl_dal:create_check_channel(PChannelBin), + {ok, State#state{channel = ChannelBin, name = Name, api_url = ApiUrl, + key_prefix = KeyPrefix, access_token = AccessToken, + push_channel = PChannelBin, type = Type}}; + _ -> + ?JError("Access token not found"), + {stop, badarg} + end end; init(Args) -> ?JError("bad start parameters ~p", [Args]), From c195a757b7457a6ada8ff374ed13a4a52003d7ed Mon Sep 17 00:00:00 2001 From: shamis Date: Wed, 27 May 2020 09:14:38 +0200 Subject: [PATCH 42/72] cleanup only for pull job --- src/dperl/jobs/dperl_office_365.erl | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/dperl/jobs/dperl_office_365.erl b/src/dperl/jobs/dperl_office_365.erl index 5541d5a0..df49981b 100644 --- a/src/dperl/jobs/dperl_office_365.erl +++ b/src/dperl/jobs/dperl_office_365.erl @@ -228,6 +228,8 @@ reconnect_exec(State, Fun, Args) -> {error, Error, State1} end. 
+do_cleanup(_Deletes, _Inserts, _Diffs, _IsFinished, #state{type = push}) -> + {error, <<"cleanup only for pull job">>}; do_cleanup(Deletes, Inserts, Diffs, IsFinished, State) -> NewState = State#state{contacts = Inserts ++ Diffs ++ Deletes}, if IsFinished -> {ok, finish, NewState#state{is_cleanup_finished = true}}; From c194bd9cc46449c660bdf98eada9bd5164d731a4 Mon Sep 17 00:00:00 2001 From: shamis Date: Wed, 27 May 2020 10:13:13 +0200 Subject: [PATCH 43/72] resolved case clause problem --- src/dperl/jobs/dperl_ouraring_crawl.erl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/dperl/jobs/dperl_ouraring_crawl.erl b/src/dperl/jobs/dperl_ouraring_crawl.erl index eeabe89a..b339777e 100644 --- a/src/dperl/jobs/dperl_ouraring_crawl.erl +++ b/src/dperl/jobs/dperl_ouraring_crawl.erl @@ -235,7 +235,7 @@ fetch_metric(Type, Day, #state{api_url = ApiUrl, access_token = AccessToken} = S {ok, _} -> fetch_metric(Type, NextDay, State); _Other -> - State + none end; {ok, Metric} -> Key = build_key(Type, State#state.key_prefix), From 25d8bf0fc0f93c1abc71f0045096d807810518f5 Mon Sep 17 00:00:00 2001 From: shamis Date: Thu, 28 May 2020 15:45:57 +0200 Subject: [PATCH 44/72] code refactoring --- src/dderl.hrl | 6 ++ src/dderl_oauth.erl | 84 ++++++++++++++++++ src/dderl_session.erl | 18 ++-- src/dperl/jobs/dperl_office_365.erl | 92 ++++++-------------- src/dperl/jobs/dperl_ouraring_crawl.erl | 111 ++++++++++-------------- 5 files changed, 169 insertions(+), 142 deletions(-) create mode 100644 src/dderl_oauth.erl diff --git a/src/dderl.hrl b/src/dderl.hrl index 51c976e1..00a9920c 100644 --- a/src/dderl.hrl +++ b/src/dderl.hrl @@ -322,4 +322,10 @@ end)() ). +% OAUTH + +-define(OURARING, <<"ouraRing">>). + +-define(OFFICE365, <<"office365">>). + -endif. diff --git a/src/dderl_oauth.erl b/src/dderl_oauth.erl new file mode 100644 index 00000000..021f7a0c --- /dev/null +++ b/src/dderl_oauth.erl @@ -0,0 +1,84 @@ +-module(dderl_oauth). + +-include("dderl.hrl"). 
+ +-define(OFFICE_365_AUTH_CONFIG, + ?GET_CONFIG(office365AuthConfig,[], + #{auth_url =>"https://login.microsoftonline.com/common/oauth2/v2.0/authorize?response_type=code&response_mode=query", + client_id => "12345", redirect_uri => "https://localhost:8443/dderl/", client_secret => "12345", grant_type => "authorization_code", + token_url => "https://login.microsoftonline.com/common/oauth2/v2.0/token", + scope => "offline_access https://graph.microsoft.com/people.read"}, + "Office 365 (Graph API) auth config")). + +-define(OURA_RING_AUTH_CONFIG, + ?GET_CONFIG(ouraRingAuthConfig,[], + #{auth_url =>"https://cloud.ouraring.com/oauth/authorize?response_type=code", + client_id => "12345", redirect_uri => "https://localhost:8443/dderl/", + client_secret => "12345", grant_type => "authorization_code", + token_url => "https://cloud.ouraring.com/oauth/token", + scope => "email personal daily"}, + "Oura Ring auth config")). + +-export([get_authorize_url/2, get_access_token/2, get_token_info/1, refresh_access_token/1]). + +get_auth_config(?OFFICE365) -> ?OFFICE_365_AUTH_CONFIG; +get_auth_config(?OURARING) -> ?OURA_RING_AUTH_CONFIG. + +get_token_info(Type) when Type == ?OFFICE365 orelse Type == ?OURARING -> + dperl_dal:read_channel(<<"avatar">>, [binary_to_list(Type),"token"]). + +set_token_info(TokenInfo, Type) when is_map(TokenInfo) -> + set_token_info(imem_json:encode(TokenInfo), Type); +set_token_info(TokenInfo, Type) when is_list(TokenInfo) -> + set_token_info(list_to_binary(TokenInfo), Type); +set_token_info(TokenInfo, Type) when is_binary(TokenInfo), (Type == ?OFFICE365 orelse Type == ?OURARING) -> + dperl_dal:create_check_channel(<<"avatar">>), + dperl_dal:write_channel(<<"avatar">>, [binary_to_list(Type), "token"], TokenInfo). 
+ +get_authorize_url(XSRFToken, Type) when Type == ?OFFICE365 orelse Type == ?OURARING -> + State = #{xsrfToken => XSRFToken, type => Type}, + #{auth_url := Url, client_id := ClientId, redirect_uri := RedirectURI, + scope := Scope} = get_auth_config(Type), + UrlParams = dperl_dal:url_enc_params( + #{"client_id" => ClientId, "redirect_uri" => {enc, RedirectURI}, + "scope" => {enc, Scope}, "state" => {enc, imem_json:encode(State)}}), + erlang:iolist_to_binary([Url, "&", UrlParams]). + +get_access_token(Code, Type) when Type == ?OFFICE365 orelse Type == ?OURARING -> + #{token_url := TUrl, client_id := ClientId, redirect_uri := RedirectURI, + client_secret := Secret, grant_type := GrantType, + scope := Scope} = get_auth_config(Type), + Body = dperl_dal:url_enc_params( + #{"client_id" => ClientId, "scope" => {enc, Scope}, "code" => Code, + "redirect_uri" => {enc, RedirectURI}, "grant_type" => GrantType, + "client_secret" => {enc, Secret}}), + ContentType = "application/x-www-form-urlencoded", + case httpc:request(post, {TUrl, "", ContentType, Body}, [], []) of + {ok, {{_, 200, "OK"}, _, TokenInfo}} -> + set_token_info(TokenInfo, Type), + ok; + {ok, {{_, Code, _}, _, Error}} -> + ?Error("Fetching access token : ~p:~p", [Code, Error]), + {error, Error}; + {error, Error} -> + ?Error("Fetching access token : ~p", [Error]), + {error, Error} + end. 
+ +refresh_access_token(Type) when Type == ?OFFICE365 orelse Type == ?OURARING -> + #{token_url := TUrl, client_id := ClientId, scope := Scope, + client_secret := Secret} = get_auth_config(Type), + #{<<"refresh_token">> := RefreshToken} = get_token_info(Type), + Body = dperl_dal:url_enc_params( + #{"client_id" => ClientId, "client_secret" => {enc, Secret}, "scope" => {enc, Scope}, + "refresh_token" => RefreshToken, "grant_type" => "refresh_token"}), + ContentType = "application/x-www-form-urlencoded", + case httpc:request(post, {TUrl, "", ContentType, Body}, [], []) of + {ok, {{_, 200, "OK"}, _, TokenBody}} -> + TokenInfo = imem_json:decode(list_to_binary(TokenBody), [return_maps]), + set_token_info(TokenBody, Type), + #{<<"access_token">> := AccessToken} = TokenInfo, + {ok, AccessToken}; + Error -> + {error, Error} + end. diff --git a/src/dderl_session.erl b/src/dderl_session.erl index 754c0964..a5fe9704 100644 --- a/src/dderl_session.erl +++ b/src/dderl_session.erl @@ -381,27 +381,26 @@ process_call({[<<"about">>], _ReqData}, _Adapter, From, {SrcIp,_}, State) -> State; process_call({[<<"office_365_auth_config">>], _ReqData}, _Adapter, From, {SrcIp, _}, State) -> - act_log(From, ?CMD_NOARGS, #{src => SrcIp, cmd => "about"}, State), - Url = dperl_office_365:get_authorize_url(State#state.xsrf_token), + act_log(From, ?CMD_NOARGS, #{src => SrcIp, cmd => "office_365_auth_config"}, State), + Url = dderl_oauth:get_authorize_url(State#state.xsrf_token, ?OFFICE365), reply(From, #{<<"office_365_auth_config">> => #{<<"url">> => Url}}, self()), State; process_call({[<<"oura_ring_auth_config">>], _ReqData}, _Adapter, From, {SrcIp, _}, State) -> - act_log(From, ?CMD_NOARGS, #{src => SrcIp, cmd => "about"}, State), - Url = dperl_ouraring_crawl:get_authorize_url(State#state.xsrf_token), + act_log(From, ?CMD_NOARGS, #{src => SrcIp, cmd => "oura_ring_auth_config"}, State), + Url = dderl_oauth:get_authorize_url(State#state.xsrf_token, ?OURARING), reply(From, 
#{<<"oura_ring_auth_config">> => #{<<"url">> => Url}}, self()), State; process_call({[<<"oauth2_callback">>], ReqData}, _Adapter, From, {SrcIp, _}, State) -> - act_log(From, ?CMD_NOARGS, #{src => SrcIp, cmd => "format_json_to_save", args => ReqData}, State), + act_log(From, ?CMD_NOARGS, #{src => SrcIp, cmd => "oauth2_callback", args => ReqData}, State), #{<<"oauth2_callback">> := #{<<"code">> := Code, <<"state">> := #{<<"type">> := Type}}} = jsx:decode(ReqData, [return_maps]), - Module = oauth2_callback_module(Type), - case Module:get_access_token(Code) of + case dderl_oauth:get_access_token(Code, Type) of ok -> reply(From, #{<<"oauth2_callback">> => #{<<"status">> => <<"ok">>}}, self()); {error, Error} -> - ?Error("Fetching token : ~p:get_access_token(~p) : ~p", [Module, Code, Error]), + ?Error("Fetching token : ~p", [Error]), reply(From, #{<<"oauth2_callback">> => #{<<"error">> => <<"Fetching token failed, Try again">>}}, self()) end, State; @@ -888,9 +887,6 @@ cancel_timer(TRef) -> erlang:cancel_timer(TRef), ok. -oauth2_callback_module(<<"office365">>) -> dperl_office_365; -oauth2_callback_module(<<"ouraRing">>) -> dperl_ouraring_crawl. - act_log(ReplyPid, LogLevel, Args, State) -> ReplyPid ! {access, Args#{userId => case Args of diff --git a/src/dperl/jobs/dperl_office_365.erl b/src/dperl/jobs/dperl_office_365.erl index df49981b..3f7f4caa 100644 --- a/src/dperl/jobs/dperl_office_365.erl +++ b/src/dperl/jobs/dperl_office_365.erl @@ -5,20 +5,10 @@ -behavior(dperl_worker). -behavior(dperl_strategy_scr). 
--define(OFFICE_365_AUTH_CONFIG, - ?GET_CONFIG(office365AuthConfig,[], - #{auth_url =>"https://login.microsoftonline.com/common/oauth2/v2.0/authorize?response_type=code&response_mode=query", - client_id => "12345", redirect_uri => "https://localhost:8443/dderl/", client_secret => "12345", grant_type => "authorization_code", - token_url => "https://login.microsoftonline.com/common/oauth2/v2.0/token", - scope => "offline_access https://graph.microsoft.com/people.read"}, - "Office 365 (Graph API) auth config")). - % dperl_worker exports -export([init/1, terminate/2, handle_call/3, handle_cast/2, handle_info/2, get_status/1, init_state/1]). --export([get_authorize_url/1, get_access_token/1]). - % contacts graph api % https://docs.microsoft.com/en-us/graph/api/resources/contact?view=graph-rest-1.0 @@ -33,70 +23,38 @@ fetch_src/2, fetch_dst/2, delete_dst/2, insert_dst/3, update_dst/3, report_status/3]). -get_office_365_auth_config() -> - ?OFFICE_365_AUTH_CONFIG. - -get_token_info() -> - dperl_dal:read_channel(<<"avatar">>, ["office365","token"]). - -set_token_info(TokenInfo) when is_map(TokenInfo) -> - set_token_info(imem_json:encode(TokenInfo)); -set_token_info(TokenInfo) when is_list(TokenInfo) -> - set_token_info(list_to_binary(TokenInfo)); -set_token_info(TokenInfo) when is_binary(TokenInfo) -> - dperl_dal:create_check_channel(<<"avatar">>), - dperl_dal:write_channel(<<"avatar">>, ["office365","token"], TokenInfo). - -get_authorize_url(XSRFToken) -> - State = #{xsrfToken => XSRFToken, type => <<"office365">>}, - #{auth_url := Url, client_id := ClientId, redirect_uri := RedirectURI, - scope := Scope} = get_office_365_auth_config(), - UrlParams = dperl_dal:url_enc_params( - #{"client_id" => ClientId, "redirect_uri" => {enc, RedirectURI}, - "scope" => {enc, Scope}, "state" => {enc, imem_json:encode(State)}}), - erlang:iolist_to_binary([Url, "&", UrlParams]). 
- -get_access_token(Code) -> - #{token_url := TUrl, client_id := ClientId, redirect_uri := RedirectURI, - client_secret := Secret, grant_type := GrantType, - scope := Scope} = get_office_365_auth_config(), - Body = dperl_dal:url_enc_params( - #{"client_id" => ClientId, "scope" => {enc, Scope}, "code" => Code, - "redirect_uri" => {enc, RedirectURI}, "grant_type" => GrantType, - "client_secret" => {enc, Secret}}), - ContentType = "application/x-www-form-urlencoded", - case httpc:request(post, {TUrl, "", ContentType, Body}, [], []) of - {ok, {_, _, TokenInfo}} -> - set_token_info(TokenInfo), - ok; - {error, Error} -> - ?Error("Fetching access token : ~p", [Error]), - {error, Error} - end. - connect_check_src(#state{is_connected = true} = State) -> {ok, State}; connect_check_src(#state{is_connected = false} = State) -> ?JTrace("Refreshing access token"), - #{token_url := TUrl, client_id := ClientId, client_secret := Secret, - scope := Scope} = get_office_365_auth_config(), - #{<<"refresh_token">> := RefreshToken} = get_token_info(), - Body = dperl_dal:url_enc_params( - #{"client_id" => ClientId, "scope" => {enc, Scope}, - "refresh_token" => RefreshToken, "grant_type" => "refresh_token", - "client_secret" => {enc, Secret}}), - ContentType = "application/x-www-form-urlencoded", - case httpc:request(post, {TUrl, "", ContentType, Body}, [], []) of - {ok, {{_, 200, "OK"}, _, TokenBody}} -> - TokenInfo = imem_json:decode(list_to_binary(TokenBody), [return_maps]), - set_token_info(TokenBody), - #{<<"access_token">> := AccessToken} = TokenInfo, - ?JInfo("new access token fetched"), + case dderl_oauth:refresh_access_token(?OFFICE365) of + {ok, AccessToken} -> + ?Info("new access token fetched"), {ok, State#state{access_token = AccessToken, is_connected = true}}; - Error -> + {error, Error} -> ?JError("Unexpected response : ~p", [Error]), {error, Error, State} end. 
+ % ?JTrace("Refreshing access token"), + % #{token_url := TUrl, client_id := ClientId, client_secret := Secret, + % scope := Scope} = get_office_365_auth_config(), + % #{<<"refresh_token">> := RefreshToken} = get_token_info(), + % Body = dperl_dal:url_enc_params( + % #{"client_id" => ClientId, "scope" => {enc, Scope}, + % "refresh_token" => RefreshToken, "grant_type" => "refresh_token", + % "client_secret" => {enc, Secret}}), + % ContentType = "application/x-www-form-urlencoded", + % case httpc:request(post, {TUrl, "", ContentType, Body}, [], []) of + % {ok, {{_, 200, "OK"}, _, TokenBody}} -> + % TokenInfo = imem_json:decode(list_to_binary(TokenBody), [return_maps]), + % set_token_info(TokenBody), + % #{<<"access_token">> := AccessToken} = TokenInfo, + % ?JInfo("new access token fetched"), + % {ok, State#state{access_token = AccessToken, is_connected = true}}; + % Error -> + % ?JError("Unexpected response : ~p", [Error]), + % {error, Error, State} + % end. get_source_events(#state{audit_start_time = LastStartTime, type = push, push_channel = PChannel} = State, BulkSize) -> @@ -249,7 +207,7 @@ init({#dperlJob{name=Name, srcArgs = #{api_url := ApiUrl}, args = Args, {User, EncHash} -> ?JInfo("Starting with ~p's enchash...", [User]), imem_sec_mnesia:put_enc_hash(EncHash), - case get_token_info() of + case dderl_oauth:get_token_info(?OFFICE365) of #{<<"access_token">> := AccessToken} -> ChannelBin = dperl_dal:to_binary(Channel), PChannelBin = dperl_dal:to_binary(PChannel), diff --git a/src/dperl/jobs/dperl_ouraring_crawl.erl b/src/dperl/jobs/dperl_ouraring_crawl.erl index b339777e..7a3684bd 100644 --- a/src/dperl/jobs/dperl_ouraring_crawl.erl +++ b/src/dperl/jobs/dperl_ouraring_crawl.erl @@ -10,20 +10,11 @@ "Days to be shifted backwards for starting the job") ). 
--define(OURA_RING_AUTH_CONFIG, - ?GET_CONFIG(ouraRingAuthConfig,[], - #{auth_url =>"https://cloud.ouraring.com/oauth/authorize?response_type=code", - client_id => "12345", redirect_uri => "https://localhost:8443/dderl/", - client_secret => "12345", grant_type => "authorization_code", - token_url => "https://cloud.ouraring.com/oauth/token", - scope => "email personal daily"}, - "Oura Ring auth config")). - % dperl_worker exports -export([init/1, terminate/2, handle_call/3, handle_cast/2, handle_info/2, get_status/1, init_state/1]). --export([get_authorize_url/1, get_access_token/1]). +% -export([get_authorize_url/1, get_access_token/1]). -record(state, {name, channel, is_connected = true, access_token, api_url, last_sleep_day, last_activity_day, last_readiness_day, @@ -34,65 +25,54 @@ do_cleanup/2, do_refresh/2, fetch_src/2, fetch_dst/2, delete_dst/2, insert_dst/3, update_dst/3, report_status/3]). -get_oura_ring_auth_config() -> - ?OURA_RING_AUTH_CONFIG. - -get_token_info() -> - dperl_dal:read_channel(<<"avatar">>, ["ouraRing","token"]). - -set_token_info(TokenInfo) when is_map(TokenInfo) -> - set_token_info(imem_json:encode(TokenInfo)); -set_token_info(TokenInfo) when is_list(TokenInfo) -> - set_token_info(list_to_binary(TokenInfo)); -set_token_info(TokenInfo) when is_binary(TokenInfo) -> - dperl_dal:create_check_channel(<<"avatar">>), - dperl_dal:write_channel(<<"avatar">>, ["ouraRing","token"], TokenInfo). - -get_authorize_url(XSRFToken) -> - State = #{xsrfToken => XSRFToken, type => <<"ouraRing">>}, - #{auth_url := Url, client_id := ClientId, redirect_uri := RedirectURI, - scope := Scope} = get_oura_ring_auth_config(), - UrlParams = dperl_dal:url_enc_params( - #{"client_id" => ClientId, "state" => {enc, imem_json:encode(State)}, - "scope" => {enc, Scope},"redirect_uri" => {enc, RedirectURI}}), - erlang:iolist_to_binary([Url, "&", UrlParams]). 
- -get_access_token(Code) -> - #{token_url := TUrl, client_id := ClientId, redirect_uri := RedirectURI, - client_secret := Secret, grant_type := GrantType} = get_oura_ring_auth_config(), - Body = dperl_dal:url_enc_params( - #{"client_id" => ClientId, "code" => Code, "redirect_uri" => {enc, RedirectURI}, - "client_secret" => {enc, Secret}, "grant_type" => GrantType}), - ContentType = "application/x-www-form-urlencoded", - case httpc:request(post, {TUrl, "", ContentType, Body}, [], []) of - {ok, {_, _, TokenInfo}} -> - set_token_info(TokenInfo), - ok; - {error, Error} -> - ?Error("Fetching access token : ~p", [Error]), - {error, Error} - end. - +% get_oura_ring_auth_config() -> +% ?OURA_RING_AUTH_CONFIG. + +% get_token_info() -> +% dperl_dal:read_channel(<<"avatar">>, ["ouraRing","token"]). + +% set_token_info(TokenInfo) when is_map(TokenInfo) -> +% set_token_info(imem_json:encode(TokenInfo)); +% set_token_info(TokenInfo) when is_list(TokenInfo) -> +% set_token_info(list_to_binary(TokenInfo)); +% set_token_info(TokenInfo) when is_binary(TokenInfo) -> +% dperl_dal:create_check_channel(<<"avatar">>), +% dperl_dal:write_channel(<<"avatar">>, ["ouraRing","token"], TokenInfo). + +% get_authorize_url(XSRFToken) -> +% State = #{xsrfToken => XSRFToken, type => <<"ouraRing">>}, +% #{auth_url := Url, client_id := ClientId, redirect_uri := RedirectURI, +% scope := Scope} = get_oura_ring_auth_config(), +% UrlParams = dperl_dal:url_enc_params( +% #{"client_id" => ClientId, "state" => {enc, imem_json:encode(State)}, +% "scope" => {enc, Scope},"redirect_uri" => {enc, RedirectURI}}), +% erlang:iolist_to_binary([Url, "&", UrlParams]). 
+ +% get_access_token(Code) -> +% #{token_url := TUrl, client_id := ClientId, redirect_uri := RedirectURI, +% client_secret := Secret, grant_type := GrantType} = get_oura_ring_auth_config(), +% Body = dperl_dal:url_enc_params( +% #{"client_id" => ClientId, "code" => Code, "redirect_uri" => {enc, RedirectURI}, +% "client_secret" => {enc, Secret}, "grant_type" => GrantType}), +% ContentType = "application/x-www-form-urlencoded", +% case httpc:request(post, {TUrl, "", ContentType, Body}, [], []) of +% {ok, {_, _, TokenInfo}} -> +% set_token_info(TokenInfo), +% ok; +% {error, Error} -> +% ?Error("Fetching access token : ~p", [Error]), +% {error, Error} +% end. connect_check_src(#state{is_connected = true} = State) -> {ok, State}; connect_check_src(#state{is_connected = false} = State) -> ?JTrace("Refreshing access token"), - #{token_url := TUrl, client_id := ClientId, - client_secret := Secret} = get_oura_ring_auth_config(), - #{<<"refresh_token">> := RefreshToken} = get_token_info(), - Body = dperl_dal:url_enc_params( - #{"client_id" => ClientId, "client_secret" => {enc, Secret}, - "refresh_token" => RefreshToken, "grant_type" => "refresh_token"}), - ContentType = "application/x-www-form-urlencoded", - case httpc:request(post, {TUrl, "", ContentType, Body}, [], []) of - {ok, {{_, 200, "OK"}, _, TokenBody}} -> - TokenInfo = imem_json:decode(list_to_binary(TokenBody), [return_maps]), - set_token_info(TokenBody), - #{<<"access_token">> := AccessToken} = TokenInfo, - ?JInfo("new access token fetched"), + case dderl_oauth:refresh_access_token(?OURARING) of + {ok, AccessToken} -> + ?Info("new access token fetched"), {ok, State#state{access_token = AccessToken, is_connected = true}}; - Error -> + {error, Error} -> ?JError("Unexpected response : ~p", [Error]), {error, Error, State} end. 
@@ -168,7 +148,7 @@ init({#dperlJob{name=Name, dstArgs = #{channel := Channel} = DstArgs, {User, EncHash} -> ?JInfo("Starting with ~p's enchash...", [User]), imem_sec_mnesia:put_enc_hash(EncHash), - case get_token_info() of + case dderl_oauth:get_token_info(?OURARING) of #{<<"access_token">> := AccessToken} -> ChannelBin = dperl_dal:to_binary(Channel), KeyPrefix = maps:get(key_prefix, DstArgs, []), @@ -308,6 +288,9 @@ exec_req(Url, AccessToken) -> case httpc:request(get, {Url, AuthHeader}, [], []) of {ok, {{_, 200, "OK"}, _, Result}} -> imem_json:decode(list_to_binary(Result), [return_maps]); + {ok, {{_, 401, _}, _, Error}} -> + ?JError("Unauthorized body : ~s", [Error]), + {error, unauthorized}; Error -> {error, Error} end. From 34e2e78bef0a8cbff39ecf81c352d8d6ae413045 Mon Sep 17 00:00:00 2001 From: shamis Date: Thu, 28 May 2020 15:48:51 +0200 Subject: [PATCH 45/72] removed commented code --- src/dperl/jobs/dperl_office_365.erl | 20 ------------ src/dperl/jobs/dperl_ouraring_crawl.erl | 41 ------------------------- 2 files changed, 61 deletions(-) diff --git a/src/dperl/jobs/dperl_office_365.erl b/src/dperl/jobs/dperl_office_365.erl index 3f7f4caa..8341ee27 100644 --- a/src/dperl/jobs/dperl_office_365.erl +++ b/src/dperl/jobs/dperl_office_365.erl @@ -35,26 +35,6 @@ connect_check_src(#state{is_connected = false} = State) -> ?JError("Unexpected response : ~p", [Error]), {error, Error, State} end. 
- % ?JTrace("Refreshing access token"), - % #{token_url := TUrl, client_id := ClientId, client_secret := Secret, - % scope := Scope} = get_office_365_auth_config(), - % #{<<"refresh_token">> := RefreshToken} = get_token_info(), - % Body = dperl_dal:url_enc_params( - % #{"client_id" => ClientId, "scope" => {enc, Scope}, - % "refresh_token" => RefreshToken, "grant_type" => "refresh_token", - % "client_secret" => {enc, Secret}}), - % ContentType = "application/x-www-form-urlencoded", - % case httpc:request(post, {TUrl, "", ContentType, Body}, [], []) of - % {ok, {{_, 200, "OK"}, _, TokenBody}} -> - % TokenInfo = imem_json:decode(list_to_binary(TokenBody), [return_maps]), - % set_token_info(TokenBody), - % #{<<"access_token">> := AccessToken} = TokenInfo, - % ?JInfo("new access token fetched"), - % {ok, State#state{access_token = AccessToken, is_connected = true}}; - % Error -> - % ?JError("Unexpected response : ~p", [Error]), - % {error, Error, State} - % end. get_source_events(#state{audit_start_time = LastStartTime, type = push, push_channel = PChannel} = State, BulkSize) -> diff --git a/src/dperl/jobs/dperl_ouraring_crawl.erl b/src/dperl/jobs/dperl_ouraring_crawl.erl index 7a3684bd..4dd23027 100644 --- a/src/dperl/jobs/dperl_ouraring_crawl.erl +++ b/src/dperl/jobs/dperl_ouraring_crawl.erl @@ -14,8 +14,6 @@ -export([init/1, terminate/2, handle_call/3, handle_cast/2, handle_info/2, get_status/1, init_state/1]). -% -export([get_authorize_url/1, get_access_token/1]). - -record(state, {name, channel, is_connected = true, access_token, api_url, last_sleep_day, last_activity_day, last_readiness_day, infos = [], key_prefix}). @@ -25,45 +23,6 @@ do_cleanup/2, do_refresh/2, fetch_src/2, fetch_dst/2, delete_dst/2, insert_dst/3, update_dst/3, report_status/3]). -% get_oura_ring_auth_config() -> -% ?OURA_RING_AUTH_CONFIG. - -% get_token_info() -> -% dperl_dal:read_channel(<<"avatar">>, ["ouraRing","token"]). 
- -% set_token_info(TokenInfo) when is_map(TokenInfo) -> -% set_token_info(imem_json:encode(TokenInfo)); -% set_token_info(TokenInfo) when is_list(TokenInfo) -> -% set_token_info(list_to_binary(TokenInfo)); -% set_token_info(TokenInfo) when is_binary(TokenInfo) -> -% dperl_dal:create_check_channel(<<"avatar">>), -% dperl_dal:write_channel(<<"avatar">>, ["ouraRing","token"], TokenInfo). - -% get_authorize_url(XSRFToken) -> -% State = #{xsrfToken => XSRFToken, type => <<"ouraRing">>}, -% #{auth_url := Url, client_id := ClientId, redirect_uri := RedirectURI, -% scope := Scope} = get_oura_ring_auth_config(), -% UrlParams = dperl_dal:url_enc_params( -% #{"client_id" => ClientId, "state" => {enc, imem_json:encode(State)}, -% "scope" => {enc, Scope},"redirect_uri" => {enc, RedirectURI}}), -% erlang:iolist_to_binary([Url, "&", UrlParams]). - -% get_access_token(Code) -> -% #{token_url := TUrl, client_id := ClientId, redirect_uri := RedirectURI, -% client_secret := Secret, grant_type := GrantType} = get_oura_ring_auth_config(), -% Body = dperl_dal:url_enc_params( -% #{"client_id" => ClientId, "code" => Code, "redirect_uri" => {enc, RedirectURI}, -% "client_secret" => {enc, Secret}, "grant_type" => GrantType}), -% ContentType = "application/x-www-form-urlencoded", -% case httpc:request(post, {TUrl, "", ContentType, Body}, [], []) of -% {ok, {_, _, TokenInfo}} -> -% set_token_info(TokenInfo), -% ok; -% {error, Error} -> -% ?Error("Fetching access token : ~p", [Error]), -% {error, Error} -% end. 
- connect_check_src(#state{is_connected = true} = State) -> {ok, State}; connect_check_src(#state{is_connected = false} = State) -> From d3aeb8306383ec33a9860ebdd63a3e344a83355b Mon Sep 17 00:00:00 2001 From: shamis Date: Thu, 28 May 2020 16:22:08 +0200 Subject: [PATCH 46/72] imem_sec_mnesia renamed to imem_enc_mnesia --- src/dderl_fsm.erl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/dderl_fsm.erl b/src/dderl_fsm.erl index e123a6d8..c47ec67e 100644 --- a/src/dderl_fsm.erl +++ b/src/dderl_fsm.erl @@ -1743,7 +1743,7 @@ handle_call(cache_data, From, SN, #state{tableId = TableId, ctx=#ctx{rowCols=Row handle_call(inspect_status, From, SN, State) -> {next_state, SN, State, [{reply, From, SN}]}; handle_call({put_enc_hash, EncHash}, From, SN, State) -> - imem_sec_mnesia:put_enc_hash(EncHash), + imem_enc_mnesia:put_enc_hash(EncHash), {next_state, SN, State, [{reply, From, ok}]}; handle_call(inspect_state, From, SN, State) -> {next_state, SN, State, [{reply, From, State}]}; From f4547103a7abba3219fcbe93cea9b37f159727b8 Mon Sep 17 00:00:00 2001 From: shamis Date: Thu, 28 May 2020 16:23:14 +0200 Subject: [PATCH 47/72] imem_sec_mnesia renamed to imem_enc_mnesia --- src/dperl/jobs/dperl_office_365.erl | 2 +- src/dperl/jobs/dperl_ouraring_crawl.erl | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/src/dperl/jobs/dperl_office_365.erl b/src/dperl/jobs/dperl_office_365.erl index 8341ee27..c87b9549 100644 --- a/src/dperl/jobs/dperl_office_365.erl +++ b/src/dperl/jobs/dperl_office_365.erl @@ -186,7 +186,7 @@ init({#dperlJob{name=Name, srcArgs = #{api_url := ApiUrl}, args = Args, {stop, badarg}; {User, EncHash} -> ?JInfo("Starting with ~p's enchash...", [User]), - imem_sec_mnesia:put_enc_hash(EncHash), + imem_enc_mnesia:put_enc_hash(EncHash), case dderl_oauth:get_token_info(?OFFICE365) of #{<<"access_token">> := AccessToken} -> ChannelBin = dperl_dal:to_binary(Channel), diff --git a/src/dperl/jobs/dperl_ouraring_crawl.erl 
b/src/dperl/jobs/dperl_ouraring_crawl.erl index 4dd23027..55a248ed 100644 --- a/src/dperl/jobs/dperl_ouraring_crawl.erl +++ b/src/dperl/jobs/dperl_ouraring_crawl.erl @@ -106,7 +106,7 @@ init({#dperlJob{name=Name, dstArgs = #{channel := Channel} = DstArgs, {stop, badarg}; {User, EncHash} -> ?JInfo("Starting with ~p's enchash...", [User]), - imem_sec_mnesia:put_enc_hash(EncHash), + imem_enc_mnesia:put_enc_hash(EncHash), case dderl_oauth:get_token_info(?OURARING) of #{<<"access_token">> := AccessToken} -> ChannelBin = dperl_dal:to_binary(Channel), From 6422c7234cdac7c850e325d7d827fc0899613105 Mon Sep 17 00:00:00 2001 From: stoch Date: Fri, 29 May 2020 09:06:01 +0200 Subject: [PATCH 48/72] move to imem_enc_mnesia --- src/dderl_fsm.erl | 2 +- src/dperl/jobs/dperl_office_365.erl | 2 +- src/dperl/jobs/dperl_ouraring_crawl.erl | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/src/dderl_fsm.erl b/src/dderl_fsm.erl index e123a6d8..c47ec67e 100644 --- a/src/dderl_fsm.erl +++ b/src/dderl_fsm.erl @@ -1743,7 +1743,7 @@ handle_call(cache_data, From, SN, #state{tableId = TableId, ctx=#ctx{rowCols=Row handle_call(inspect_status, From, SN, State) -> {next_state, SN, State, [{reply, From, SN}]}; handle_call({put_enc_hash, EncHash}, From, SN, State) -> - imem_sec_mnesia:put_enc_hash(EncHash), + imem_enc_mnesia:put_enc_hash(EncHash), {next_state, SN, State, [{reply, From, ok}]}; handle_call(inspect_state, From, SN, State) -> {next_state, SN, State, [{reply, From, State}]}; diff --git a/src/dperl/jobs/dperl_office_365.erl b/src/dperl/jobs/dperl_office_365.erl index df49981b..11685506 100644 --- a/src/dperl/jobs/dperl_office_365.erl +++ b/src/dperl/jobs/dperl_office_365.erl @@ -248,7 +248,7 @@ init({#dperlJob{name=Name, srcArgs = #{api_url := ApiUrl}, args = Args, {stop, badarg}; {User, EncHash} -> ?JInfo("Starting with ~p's enchash...", [User]), - imem_sec_mnesia:put_enc_hash(EncHash), + imem_enc_mnesia:put_enc_hash(EncHash), case get_token_info() of 
#{<<"access_token">> := AccessToken} -> ChannelBin = dperl_dal:to_binary(Channel), diff --git a/src/dperl/jobs/dperl_ouraring_crawl.erl b/src/dperl/jobs/dperl_ouraring_crawl.erl index b339777e..98baf123 100644 --- a/src/dperl/jobs/dperl_ouraring_crawl.erl +++ b/src/dperl/jobs/dperl_ouraring_crawl.erl @@ -167,7 +167,7 @@ init({#dperlJob{name=Name, dstArgs = #{channel := Channel} = DstArgs, {stop, badarg}; {User, EncHash} -> ?JInfo("Starting with ~p's enchash...", [User]), - imem_sec_mnesia:put_enc_hash(EncHash), + imem_enc_mnesia:put_enc_hash(EncHash), case get_token_info() of #{<<"access_token">> := AccessToken} -> ChannelBin = dperl_dal:to_binary(Channel), From f817beb892225b671dceb8414851fd7fc4d4f52e Mon Sep 17 00:00:00 2001 From: shamis Date: Mon, 1 Jun 2020 12:25:50 +0200 Subject: [PATCH 49/72] not using hardcoded avatar table --- src/dderl_dal.erl | 27 +++++++++++++++++++++++ src/dderl_oauth.erl | 29 ++++++++++++------------- src/dderl_session.erl | 3 ++- src/dperl/jobs/dperl_office_365.erl | 14 ++++++------ src/dperl/jobs/dperl_ouraring_crawl.erl | 14 ++++++------ 5 files changed, 57 insertions(+), 30 deletions(-) diff --git a/src/dderl_dal.erl b/src/dderl_dal.erl index e0722b15..1944ee6d 100644 --- a/src/dderl_dal.erl +++ b/src/dderl_dal.erl @@ -45,6 +45,9 @@ ,get_d3_templates_path/1 ,get_host_app/0 ,is_proxy/2 + ,create_check_avatar_table/1 + ,write_to_avatar_table/3 + ,read_from_avatar_table/2 ]). -record(state, { schema :: term() @@ -60,6 +63,10 @@ -define(USE_CONN(__ConnId), {dderl, conn, {conn, __ConnId}, use}). -define(USE_LOCAL_CONN, {dderl, conn, local, use}). +-define(GET_AVATAR_TABLE(__USERNAME), + ?GET_CONFIG(__USERNAME,[], <<__USERNAME/binary, "_avatar">>,"SAML - flag to verify response signature")). + + %% Validate this permission. -define(USE_ADAPTER, {dderl, adapter, {id, __AdaptId}, use}). @@ -849,6 +856,26 @@ is_proxy(AppId, NetCtx) -> _ -> false end. 
+get_avatar_table(Username) when is_atom(Username) -> + get_avatar_table(atom_to_binary(Username, utf8)); +get_avatar_table(Username) when is_list(Username) -> + get_avatar_table(list_to_binary(Username)); +get_avatar_table(Username) when is_binary(Username) -> + ?GET_AVATAR_TABLE(Username). + +create_check_avatar_table(Username) -> + AvatarTable = get_avatar_table(Username), + imem_dal_skvh:create_check_channel(AvatarTable, []). + % imem_dal_skvh:create_check_channel(AvatarTable, [encrypted]). + +write_to_avatar_table(Username, Key, Value) -> + AvatarTable = get_avatar_table(Username), + imem_dal_skvh:write(Username, AvatarTable, Key, Value). + +read_from_avatar_table(Username, Key) -> + AvatarTable = get_avatar_table(Username), + imem_dal_skvh:read(Username, AvatarTable, Key). + -spec exec_is_proxy_fun(reference(), map()) -> boolean(). exec_is_proxy_fun(Fun, NetCtx) -> case catch Fun(NetCtx) of diff --git a/src/dderl_oauth.erl b/src/dderl_oauth.erl index 021f7a0c..3699a6fe 100644 --- a/src/dderl_oauth.erl +++ b/src/dderl_oauth.erl @@ -19,21 +19,20 @@ scope => "email personal daily"}, "Oura Ring auth config")). --export([get_authorize_url/2, get_access_token/2, get_token_info/1, refresh_access_token/1]). +-export([get_authorize_url/2, get_access_token/3, get_token_info/2, refresh_access_token/2]). get_auth_config(?OFFICE365) -> ?OFFICE_365_AUTH_CONFIG; get_auth_config(?OURARING) -> ?OURA_RING_AUTH_CONFIG. -get_token_info(Type) when Type == ?OFFICE365 orelse Type == ?OURARING -> - dperl_dal:read_channel(<<"avatar">>, [binary_to_list(Type),"token"]). +get_token_info(Username, Type) when Type == ?OFFICE365 orelse Type == ?OURARING -> + dderl_dal:read_from_avatar_table(Username, [binary_to_list(Type),"token"]). 
-set_token_info(TokenInfo, Type) when is_map(TokenInfo) -> - set_token_info(imem_json:encode(TokenInfo), Type); -set_token_info(TokenInfo, Type) when is_list(TokenInfo) -> - set_token_info(list_to_binary(TokenInfo), Type); -set_token_info(TokenInfo, Type) when is_binary(TokenInfo), (Type == ?OFFICE365 orelse Type == ?OURARING) -> - dperl_dal:create_check_channel(<<"avatar">>), - dperl_dal:write_channel(<<"avatar">>, [binary_to_list(Type), "token"], TokenInfo). +set_token_info(Username, TokenInfo, Type) when is_map(TokenInfo) -> + set_token_info(Username, imem_json:encode(TokenInfo), Type); +set_token_info(Username, TokenInfo, Type) when is_list(TokenInfo) -> + set_token_info(Username, list_to_binary(TokenInfo), Type); +set_token_info(Username, TokenInfo, Type) when is_binary(TokenInfo), (Type == ?OFFICE365 orelse Type == ?OURARING) -> + dderl_dal:write_to_avatar_table(Username, [binary_to_list(Type), "token"], TokenInfo). get_authorize_url(XSRFToken, Type) when Type == ?OFFICE365 orelse Type == ?OURARING -> State = #{xsrfToken => XSRFToken, type => Type}, @@ -44,7 +43,7 @@ get_authorize_url(XSRFToken, Type) when Type == ?OFFICE365 orelse Type == ?OURAR "scope" => {enc, Scope}, "state" => {enc, imem_json:encode(State)}}), erlang:iolist_to_binary([Url, "&", UrlParams]). 
-get_access_token(Code, Type) when Type == ?OFFICE365 orelse Type == ?OURARING -> +get_access_token(Username, Code, Type) when Type == ?OFFICE365 orelse Type == ?OURARING -> #{token_url := TUrl, client_id := ClientId, redirect_uri := RedirectURI, client_secret := Secret, grant_type := GrantType, scope := Scope} = get_auth_config(Type), @@ -55,7 +54,7 @@ get_access_token(Code, Type) when Type == ?OFFICE365 orelse Type == ?OURARING -> ContentType = "application/x-www-form-urlencoded", case httpc:request(post, {TUrl, "", ContentType, Body}, [], []) of {ok, {{_, 200, "OK"}, _, TokenInfo}} -> - set_token_info(TokenInfo, Type), + set_token_info(Username, TokenInfo, Type), ok; {ok, {{_, Code, _}, _, Error}} -> ?Error("Fetching access token : ~p:~p", [Code, Error]), @@ -65,10 +64,10 @@ get_access_token(Code, Type) when Type == ?OFFICE365 orelse Type == ?OURARING -> {error, Error} end. -refresh_access_token(Type) when Type == ?OFFICE365 orelse Type == ?OURARING -> +refresh_access_token(Username, Type) when Type == ?OFFICE365 orelse Type == ?OURARING -> #{token_url := TUrl, client_id := ClientId, scope := Scope, client_secret := Secret} = get_auth_config(Type), - #{<<"refresh_token">> := RefreshToken} = get_token_info(Type), + #{<<"refresh_token">> := RefreshToken} = get_token_info(Username, Type), Body = dperl_dal:url_enc_params( #{"client_id" => ClientId, "client_secret" => {enc, Secret}, "scope" => {enc, Scope}, "refresh_token" => RefreshToken, "grant_type" => "refresh_token"}), @@ -76,7 +75,7 @@ refresh_access_token(Type) when Type == ?OFFICE365 orelse Type == ?OURARING -> case httpc:request(post, {TUrl, "", ContentType, Body}, [], []) of {ok, {{_, 200, "OK"}, _, TokenBody}} -> TokenInfo = imem_json:decode(list_to_binary(TokenBody), [return_maps]), - set_token_info(TokenBody, Type), + set_token_info(Username, TokenBody, Type), #{<<"access_token">> := AccessToken} = TokenInfo, {ok, AccessToken}; Error -> diff --git a/src/dderl_session.erl b/src/dderl_session.erl index 
a5fe9704..6b043c52 100644 --- a/src/dderl_session.erl +++ b/src/dderl_session.erl @@ -396,7 +396,7 @@ process_call({[<<"oauth2_callback">>], ReqData}, _Adapter, From, {SrcIp, _}, Sta act_log(From, ?CMD_NOARGS, #{src => SrcIp, cmd => "oauth2_callback", args => ReqData}, State), #{<<"oauth2_callback">> := #{<<"code">> := Code, <<"state">> := #{<<"type">> := Type}}} = jsx:decode(ReqData, [return_maps]), - case dderl_oauth:get_access_token(Code, Type) of + case dderl_oauth:get_access_token(State#state.user, Code, Type) of ok -> reply(From, #{<<"oauth2_callback">> => #{<<"status">> => <<"ok">>}}, self()); {error, Error} -> @@ -853,6 +853,7 @@ login(ReqData, From, SrcIp, State) -> _ -> {[UserId],true} = imem_meta:select(ddAccount, [{#ddAccount{name=State#state.user, id='$1',_='_'}, [], ['$1']}]), + ok = dderl_dal:create_check_avatar_table(State#state.user), act_log(From, ?LOGIN_CONNECT, #{src => SrcIp, userId => UserId, cmd_resp => "login success", cmd => "login"}, State), diff --git a/src/dperl/jobs/dperl_office_365.erl b/src/dperl/jobs/dperl_office_365.erl index c87b9549..fc32e41e 100644 --- a/src/dperl/jobs/dperl_office_365.erl +++ b/src/dperl/jobs/dperl_office_365.erl @@ -15,7 +15,7 @@ -record(state, {name, channel, is_connected = true, access_token, api_url, contacts = [], key_prefix, fetch_url, cl_contacts = [], is_cleanup_finished = true, push_channel, type = pull, - audit_start_time = {0,0}, first_sync = true}). + audit_start_time = {0,0}, first_sync = true, username}). 
% dperl_strategy_scr export -export([connect_check_src/1, get_source_events/2, connect_check_dst/1, @@ -25,9 +25,9 @@ connect_check_src(#state{is_connected = true} = State) -> {ok, State}; -connect_check_src(#state{is_connected = false} = State) -> +connect_check_src(#state{is_connected = false, username = Username} = State) -> ?JTrace("Refreshing access token"), - case dderl_oauth:refresh_access_token(?OFFICE365) of + case dderl_oauth:refresh_access_token(Username, ?OFFICE365) of {ok, AccessToken} -> ?Info("new access token fetched"), {ok, State#state{access_token = AccessToken, is_connected = true}}; @@ -184,10 +184,10 @@ init({#dperlJob{name=Name, srcArgs = #{api_url := ApiUrl}, args = Args, undefined -> ?JError("Encryption hash is not avaialable"), {stop, badarg}; - {User, EncHash} -> - ?JInfo("Starting with ~p's enchash...", [User]), + {Username, EncHash} -> + ?JInfo("Starting with ~p's enchash...", [Username]), imem_enc_mnesia:put_enc_hash(EncHash), - case dderl_oauth:get_token_info(?OFFICE365) of + case dderl_oauth:get_token_info(Username, ?OFFICE365) of #{<<"access_token">> := AccessToken} -> ChannelBin = dperl_dal:to_binary(Channel), PChannelBin = dperl_dal:to_binary(PChannel), @@ -197,7 +197,7 @@ init({#dperlJob{name=Name, srcArgs = #{api_url := ApiUrl}, args = Args, dperl_dal:create_check_channel(PChannelBin), {ok, State#state{channel = ChannelBin, name = Name, api_url = ApiUrl, key_prefix = KeyPrefix, access_token = AccessToken, - push_channel = PChannelBin, type = Type}}; + push_channel = PChannelBin, type = Type, username = Username}}; _ -> ?JError("Access token not found"), {stop, badarg} diff --git a/src/dperl/jobs/dperl_ouraring_crawl.erl b/src/dperl/jobs/dperl_ouraring_crawl.erl index 55a248ed..a4f06513 100644 --- a/src/dperl/jobs/dperl_ouraring_crawl.erl +++ b/src/dperl/jobs/dperl_ouraring_crawl.erl @@ -16,7 +16,7 @@ -record(state, {name, channel, is_connected = true, access_token, api_url, last_sleep_day, last_activity_day, last_readiness_day, - 
infos = [], key_prefix}). + infos = [], key_prefix, username}). % dperl_strategy_scr export -export([connect_check_src/1, get_source_events/2, connect_check_dst/1, @@ -25,9 +25,9 @@ connect_check_src(#state{is_connected = true} = State) -> {ok, State}; -connect_check_src(#state{is_connected = false} = State) -> +connect_check_src(#state{is_connected = false, username = Username} = State) -> ?JTrace("Refreshing access token"), - case dderl_oauth:refresh_access_token(?OURARING) of + case dderl_oauth:refresh_access_token(Username, ?OURARING) of {ok, AccessToken} -> ?Info("new access token fetched"), {ok, State#state{access_token = AccessToken, is_connected = true}}; @@ -104,15 +104,15 @@ init({#dperlJob{name=Name, dstArgs = #{channel := Channel} = DstArgs, undefined -> ?JError("Encryption hash is not avaialable"), {stop, badarg}; - {User, EncHash} -> - ?JInfo("Starting with ~p's enchash...", [User]), + {Username, EncHash} -> + ?JInfo("Starting with ~p's enchash...", [Username]), imem_enc_mnesia:put_enc_hash(EncHash), - case dderl_oauth:get_token_info(?OURARING) of + case dderl_oauth:get_token_info(Username, ?OURARING) of #{<<"access_token">> := AccessToken} -> ChannelBin = dperl_dal:to_binary(Channel), KeyPrefix = maps:get(key_prefix, DstArgs, []), dperl_dal:create_check_channel(ChannelBin), - {ok, State#state{channel = ChannelBin, api_url = ApiUrl, + {ok, State#state{channel = ChannelBin, api_url = ApiUrl, username = Username, key_prefix = KeyPrefix, access_token = AccessToken}}; _ -> ?JError("Access token not found"), From 6540bd05bb8f1666069824a3083d6fb9fb8a8845 Mon Sep 17 00:00:00 2001 From: shamis Date: Tue, 2 Jun 2020 20:34:17 +0200 Subject: [PATCH 50/72] read_avatar table issue ffixed --- src/dderl_dal.erl | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/src/dderl_dal.erl b/src/dderl_dal.erl index 1944ee6d..d3bc7254 100644 --- a/src/dderl_dal.erl +++ b/src/dderl_dal.erl @@ -874,7 +874,14 @@ write_to_avatar_table(Username, Key, Value) -> 
read_from_avatar_table(Username, Key) -> AvatarTable = get_avatar_table(Username), - imem_dal_skvh:read(Username, AvatarTable, Key). + case imem_dal_skvh:read(Username, AvatarTable, [Key]) of + [#{cvalue := CValue}] when is_binary(CValue) -> + imem_json:decode(CValue, [return_maps]); + [#{cvalue := CValue}] -> + CValue; + Other -> + Other + end. -spec exec_is_proxy_fun(reference(), map()) -> boolean(). exec_is_proxy_fun(Fun, NetCtx) -> From cbf3a167e04875a8b0294eb0f8e8b8b2ecb0c43c Mon Sep 17 00:00:00 2001 From: shamis Date: Wed, 3 Jun 2020 20:00:55 +0200 Subject: [PATCH 51/72] reverting last commit --- src/dderl_dal.erl | 9 +-------- 1 file changed, 1 insertion(+), 8 deletions(-) diff --git a/src/dderl_dal.erl b/src/dderl_dal.erl index d3bc7254..1944ee6d 100644 --- a/src/dderl_dal.erl +++ b/src/dderl_dal.erl @@ -874,14 +874,7 @@ write_to_avatar_table(Username, Key, Value) -> read_from_avatar_table(Username, Key) -> AvatarTable = get_avatar_table(Username), - case imem_dal_skvh:read(Username, AvatarTable, [Key]) of - [#{cvalue := CValue}] when is_binary(CValue) -> - imem_json:decode(CValue, [return_maps]); - [#{cvalue := CValue}] -> - CValue; - Other -> - Other - end. + imem_dal_skvh:read(Username, AvatarTable, Key). -spec exec_is_proxy_fun(reference(), map()) -> boolean(). 
exec_is_proxy_fun(Fun, NetCtx) -> From 572b0ab3f2d3ea7351c553534ee3b5e119f63f0b Mon Sep 17 00:00:00 2001 From: stoch Date: Thu, 4 Jun 2020 19:10:02 +0200 Subject: [PATCH 52/72] change default config lookup, instance KeyPrefixes not yet supported --- src/dderl.hrl | 6 +- src/dderl_dal.erl | 51 +++++++------- src/dderl_oauth.erl | 78 ++++++++++---------- src/dderl_session.erl | 53 +++++++++----- src/dperl/dperl.hrl | 3 +- src/dperl/jobs/dperl_file_copy.erl | 2 +- src/dperl/jobs/dperl_office_365.erl | 94 ++++++++++++++++++++----- src/dperl/jobs/dperl_ouraring_crawl.erl | 93 ++++++++++++++++++------ src/imem_adapter.erl | 2 +- 9 files changed, 255 insertions(+), 127 deletions(-) diff --git a/src/dderl.hrl b/src/dderl.hrl index 00a9920c..50782fac 100644 --- a/src/dderl.hrl +++ b/src/dderl.hrl @@ -3,6 +3,8 @@ -include_lib("imem/include/imem_meta.hrl"). -include_lib("imem/include/imem_exports.hrl"). +% -include("dderl/_checkouts/imem/include/imem_config.hrl"). + -define(DEFAULT_ROW_SIZE, 100). -record(viewstate, @@ -324,8 +326,8 @@ % OAUTH --define(OURARING, <<"ouraRing">>). +-define(SYNC_OURARING, dperl_ouraring_crawl). --define(OFFICE365, <<"office365">>). +-define(SYNC_OFFICE365, dperl_office_365). -endif. diff --git a/src/dderl_dal.erl b/src/dderl_dal.erl index 1944ee6d..b962596a 100644 --- a/src/dderl_dal.erl +++ b/src/dderl_dal.erl @@ -45,9 +45,9 @@ ,get_d3_templates_path/1 ,get_host_app/0 ,is_proxy/2 - ,create_check_avatar_table/1 - ,write_to_avatar_table/3 - ,read_from_avatar_table/2 + ,create_check_avatar_channel/1 + ,write_to_avatar_channel/3 + ,read_from_avatar_channel/2 ]). -record(state, { schema :: term() @@ -63,9 +63,11 @@ -define(USE_CONN(__ConnId), {dderl, conn, {conn, __ConnId}, use}). -define(USE_LOCAL_CONN, {dderl, conn, local, use}). --define(GET_AVATAR_TABLE(__USERNAME), - ?GET_CONFIG(__USERNAME,[], <<__USERNAME/binary, "_avatar">>,"SAML - flag to verify response signature")). 
+-define(GET_AVATAR_CHANNEL_PREFIX, + ?GET_CONFIG(avatarChannelPrefix,[], <<"ph">>,"PRIVACY_HUB - binary string prefix for avatar channel name")). +-define(GET_AVATAR_CHANNEL_OPTIONS, + ?GET_CONFIG(avatarChannelOptions,[], [encrypted,audit,history],"PRIVACY_HUB - table options for avatar channel")). %% Validate this permission. -define(USE_ADAPTER, {dderl, adapter, {id, __AdaptId}, use}). @@ -856,25 +858,26 @@ is_proxy(AppId, NetCtx) -> _ -> false end. -get_avatar_table(Username) when is_atom(Username) -> - get_avatar_table(atom_to_binary(Username, utf8)); -get_avatar_table(Username) when is_list(Username) -> - get_avatar_table(list_to_binary(Username)); -get_avatar_table(Username) when is_binary(Username) -> - ?GET_AVATAR_TABLE(Username). - -create_check_avatar_table(Username) -> - AvatarTable = get_avatar_table(Username), - imem_dal_skvh:create_check_channel(AvatarTable, []). - % imem_dal_skvh:create_check_channel(AvatarTable, [encrypted]). - -write_to_avatar_table(Username, Key, Value) -> - AvatarTable = get_avatar_table(Username), - imem_dal_skvh:write(Username, AvatarTable, Key, Value). - -read_from_avatar_table(Username, Key) -> - AvatarTable = get_avatar_table(Username), - imem_dal_skvh:read(Username, AvatarTable, Key). +avatar_channel_name(AccountId) when is_atom(AccountId) -> + avatar_channel_name(atom_to_binary(AccountId, utf8)); +avatar_channel_name(AccountId) when is_list(AccountId) -> + avatar_channel_name(list_to_binary(AccountId)); +avatar_channel_name(AccountId) when is_binary(AccountId) -> + Prefix = ?GET_AVATAR_CHANNEL_PREFIX, + <>. + +% -spec create_check_avatar_channel(ddEntityId()) -> binary(). +create_check_avatar_channel(AccountId) -> + imem_dal_skvh:create_check_channel(avatar_channel_name(AccountId), ?GET_AVATAR_CHANNEL_OPTIONS). + +write_to_avatar_channel(AccountId, Key, Value) -> + imem_dal_skvh:write(AccountId, avatar_channel_name(AccountId), Key, Value). 
+ +read_from_avatar_channel(AccountId, Key) -> + case imem_dal_skvh:read(AccountId, avatar_channel_name(AccountId), [Key]) of + [#{cvalue := CValue}] when is_binary(CValue) -> imem_json:decode(CValue, [return_maps]); + [#{cvalue := CValue}] when is_map(CValue)-> CValue + end. -spec exec_is_proxy_fun(reference(), map()) -> boolean(). exec_is_proxy_fun(Fun, NetCtx) -> diff --git a/src/dderl_oauth.erl b/src/dderl_oauth.erl index 3699a6fe..78e0dbff 100644 --- a/src/dderl_oauth.erl +++ b/src/dderl_oauth.erl @@ -1,60 +1,54 @@ -module(dderl_oauth). -include("dderl.hrl"). +% -include("dderl/_checkouts/imem/include/imem_config.hrl"). --define(OFFICE_365_AUTH_CONFIG, - ?GET_CONFIG(office365AuthConfig,[], - #{auth_url =>"https://login.microsoftonline.com/common/oauth2/v2.0/authorize?response_type=code&response_mode=query", - client_id => "12345", redirect_uri => "https://localhost:8443/dderl/", client_secret => "12345", grant_type => "authorization_code", - token_url => "https://login.microsoftonline.com/common/oauth2/v2.0/token", - scope => "offline_access https://graph.microsoft.com/people.read"}, - "Office 365 (Graph API) auth config")). +-define(TOKEN_KEYPART, "#token#"). --define(OURA_RING_AUTH_CONFIG, - ?GET_CONFIG(ouraRingAuthConfig,[], - #{auth_url =>"https://cloud.ouraring.com/oauth/authorize?response_type=code", - client_id => "12345", redirect_uri => "https://localhost:8443/dderl/", - client_secret => "12345", grant_type => "authorization_code", - token_url => "https://cloud.ouraring.com/oauth/token", - scope => "email personal daily"}, - "Oura Ring auth config")). +-export([ get_authorize_url/3 + , get_access_token/4 + , get_token_info/3 + , refresh_access_token/3 + ]). --export([get_authorize_url/2, get_access_token/3, get_token_info/2, refresh_access_token/2]). +get_token_info(AccountId, KeyPrefix, _SyncType) -> + dderl_dal:read_from_avatar_channel(AccountId, KeyPrefix ++ [?TOKEN_KEYPART]). 
-get_auth_config(?OFFICE365) -> ?OFFICE_365_AUTH_CONFIG; -get_auth_config(?OURARING) -> ?OURA_RING_AUTH_CONFIG. +set_token_info(AccountId, KeyPrefix, TokenInfo, SyncType) when is_map(TokenInfo) -> + set_token_info(AccountId, KeyPrefix, imem_json:encode(TokenInfo), SyncType); +set_token_info(AccountId, KeyPrefix, TokenInfo, SyncType) when is_list(TokenInfo) -> + set_token_info(AccountId, KeyPrefix, list_to_binary(TokenInfo), SyncType); +set_token_info(AccountId, KeyPrefix, TokenInfo, _SyncType) when is_binary(TokenInfo) -> + dderl_dal:write_to_avatar_channel(AccountId, KeyPrefix ++ [?TOKEN_KEYPART], TokenInfo). -get_token_info(Username, Type) when Type == ?OFFICE365 orelse Type == ?OURARING -> - dderl_dal:read_from_avatar_table(Username, [binary_to_list(Type),"token"]). - -set_token_info(Username, TokenInfo, Type) when is_map(TokenInfo) -> - set_token_info(Username, imem_json:encode(TokenInfo), Type); -set_token_info(Username, TokenInfo, Type) when is_list(TokenInfo) -> - set_token_info(Username, list_to_binary(TokenInfo), Type); -set_token_info(Username, TokenInfo, Type) when is_binary(TokenInfo), (Type == ?OFFICE365 orelse Type == ?OURARING) -> - dderl_dal:write_to_avatar_table(Username, [binary_to_list(Type), "token"], TokenInfo). - -get_authorize_url(XSRFToken, Type) when Type == ?OFFICE365 orelse Type == ?OURARING -> - State = #{xsrfToken => XSRFToken, type => Type}, - #{auth_url := Url, client_id := ClientId, redirect_uri := RedirectURI, - scope := Scope} = get_auth_config(Type), +get_authorize_url(XSRFToken, AuthConfig, SyncType) -> + State = #{xsrfToken => XSRFToken, type => SyncType}, + #{auth_url:=Url, client_id:=ClientId, redirect_uri:=RedirectURI, scope:=Scope} = AuthConfig, UrlParams = dperl_dal:url_enc_params( #{"client_id" => ClientId, "redirect_uri" => {enc, RedirectURI}, "scope" => {enc, Scope}, "state" => {enc, imem_json:encode(State)}}), erlang:iolist_to_binary([Url, "&", UrlParams]). 
-get_access_token(Username, Code, Type) when Type == ?OFFICE365 orelse Type == ?OURARING -> +get_access_token(AccountId, KeyPrefix, Code, SyncType) -> + AuthConfig = try + SyncType:get_auth_config() % ToDo: AuthConfig may depend on JobName or KeyPrefix + catch + _:E:S -> + ?Error("Finding AuthConfig : ~p ñ~p", [E,S]), + {error, E} + end, + ?Info("get_access_token AuthConfig: ~p",[AuthConfig]), #{token_url := TUrl, client_id := ClientId, redirect_uri := RedirectURI, - client_secret := Secret, grant_type := GrantType, - scope := Scope} = get_auth_config(Type), + client_secret := Secret, grant_type := GrantType, + scope := Scope} = AuthConfig, Body = dperl_dal:url_enc_params( #{"client_id" => ClientId, "scope" => {enc, Scope}, "code" => Code, - "redirect_uri" => {enc, RedirectURI}, "grant_type" => GrantType, - "client_secret" => {enc, Secret}}), + "redirect_uri" => {enc, RedirectURI}, "grant_type" => GrantType, + "client_secret" => {enc, Secret}}), ContentType = "application/x-www-form-urlencoded", case httpc:request(post, {TUrl, "", ContentType, Body}, [], []) of {ok, {{_, 200, "OK"}, _, TokenInfo}} -> - set_token_info(Username, TokenInfo, Type), + set_token_info(AccountId, KeyPrefix, TokenInfo, SyncType), ok; {ok, {{_, Code, _}, _, Error}} -> ?Error("Fetching access token : ~p:~p", [Code, Error]), @@ -64,10 +58,10 @@ get_access_token(Username, Code, Type) when Type == ?OFFICE365 orelse Type == ?O {error, Error} end. 
-refresh_access_token(Username, Type) when Type == ?OFFICE365 orelse Type == ?OURARING -> +refresh_access_token(AccountId, KeyPrefix, SyncType) -> #{token_url := TUrl, client_id := ClientId, scope := Scope, - client_secret := Secret} = get_auth_config(Type), - #{<<"refresh_token">> := RefreshToken} = get_token_info(Username, Type), + client_secret := Secret} = SyncType:get_auth_config(), + #{<<"refresh_token">> := RefreshToken} = get_token_info(AccountId, KeyPrefix, SyncType), Body = dperl_dal:url_enc_params( #{"client_id" => ClientId, "client_secret" => {enc, Secret}, "scope" => {enc, Scope}, "refresh_token" => RefreshToken, "grant_type" => "refresh_token"}), @@ -75,7 +69,7 @@ refresh_access_token(Username, Type) when Type == ?OFFICE365 orelse Type == ?OUR case httpc:request(post, {TUrl, "", ContentType, Body}, [], []) of {ok, {{_, 200, "OK"}, _, TokenBody}} -> TokenInfo = imem_json:decode(list_to_binary(TokenBody), [return_maps]), - set_token_info(Username, TokenBody, Type), + set_token_info(AccountId, KeyPrefix, TokenBody, SyncType), #{<<"access_token">> := AccessToken} = TokenInfo, {ok, AccessToken}; Error -> diff --git a/src/dderl_session.erl b/src/dderl_session.erl index 6b043c52..e65b4fb4 100644 --- a/src/dderl_session.erl +++ b/src/dderl_session.erl @@ -35,7 +35,7 @@ , inactive_tref :: timer:tref() , user = <<>> :: binary() , user_id :: ddEntityId() - , sess :: {atom, pid()} + , sess :: {atom(), pid()} , active_sender :: pid() , active_receiver :: pid() , downloads = [] :: integer() @@ -382,21 +382,35 @@ process_call({[<<"about">>], _ReqData}, _Adapter, From, {SrcIp,_}, State) -> process_call({[<<"office_365_auth_config">>], _ReqData}, _Adapter, From, {SrcIp, _}, State) -> act_log(From, ?CMD_NOARGS, #{src => SrcIp, cmd => "office_365_auth_config"}, State), - Url = dderl_oauth:get_authorize_url(State#state.xsrf_token, ?OFFICE365), + AuthConfig = dperl_office_365:get_auth_config(), % ToDo: may depend on JobName or KeyPrefix + Url = 
dderl_oauth:get_authorize_url(State#state.xsrf_token, AuthConfig, ?SYNC_OFFICE365), reply(From, #{<<"office_365_auth_config">> => #{<<"url">> => Url}}, self()), State; process_call({[<<"oura_ring_auth_config">>], _ReqData}, _Adapter, From, {SrcIp, _}, State) -> act_log(From, ?CMD_NOARGS, #{src => SrcIp, cmd => "oura_ring_auth_config"}, State), - Url = dderl_oauth:get_authorize_url(State#state.xsrf_token, ?OURARING), + AuthConfig = dperl_ouraring_crawl:get_auth_config(), % ToDo: may depend on JobName or KeyPrefix + ?Info("oura_ring_auth_config ~p",[AuthConfig]), + Url = dderl_oauth:get_authorize_url(State#state.xsrf_token, AuthConfig, ?SYNC_OURARING), reply(From, #{<<"oura_ring_auth_config">> => #{<<"url">> => Url}}, self()), State; process_call({[<<"oauth2_callback">>], ReqData}, _Adapter, From, {SrcIp, _}, State) -> act_log(From, ?CMD_NOARGS, #{src => SrcIp, cmd => "oauth2_callback", args => ReqData}, State), #{<<"oauth2_callback">> := - #{<<"code">> := Code, <<"state">> := #{<<"type">> := Type}}} = jsx:decode(ReqData, [return_maps]), - case dderl_oauth:get_access_token(State#state.user, Code, Type) of + #{<<"code">> := Code, <<"state">> := #{<<"type">> := SyncType}}} = jsx:decode(ReqData, [return_maps]), + ?Info("oauth2_callback SyncType: ~p Code: ~p",[SyncType, Code]), + % ToDo: Check if this data can this be trusted + {SyncHandler,KeyPrefix} = try + SH = binary_to_existing_atom(SyncType,utf8), + {SH,SH:get_key_prefix()} % ToDo: may depend on JobName or KeyPrefix + catch + _:E:_ -> + ?Error("Finding KeyPrefix : ~p", [E]), + reply(From, #{<<"oauth2_callback">> => #{<<"error">> => <<"Error finding KeyPrefix">>}}, self()) + end, + ?Info("oauth2_callback KeyPrefix: ~p",[KeyPrefix]), + case dderl_oauth:get_access_token(State#state.user, KeyPrefix, Code, SyncHandler) of ok -> reply(From, #{<<"oauth2_callback">> => #{<<"status">> => <<"ok">>}}, self()); {error, Error} -> @@ -787,7 +801,7 @@ login(ReqData, From, SrcIp, State) -> Reply0 = #{vsn => list_to_binary(Vsn), 
app => HostApp, node => Node, host => Host, rowNumLimit => imem_sql_expr:rownum_limit()}, - case catch erlimem_session:run_cmd(ErlImemSess, login,[]) of + case catch erlimem_session:run_cmd(ErlImemSess, login3,[]) of {error,{{'SecurityException',{?PasswordChangeNeeded,_}},ST}} -> ?Warn("Password expired ~s~n~p", [State#state.user, ST]), {[UserId],true} = imem_meta:select(ddAccount, [{#ddAccount{name=State#state.user, @@ -850,17 +864,22 @@ login(ReqData, From, SrcIp, State) -> reply(From, #{login => #{error => format_error(ErrMsg)}}, self()), State end; - _ -> - {[UserId],true} = imem_meta:select(ddAccount, [{#ddAccount{name=State#state.user, - id='$1',_='_'}, [], ['$1']}]), - ok = dderl_dal:create_check_avatar_table(State#state.user), - act_log(From, ?LOGIN_CONNECT, - #{src => SrcIp, userId => UserId, cmd_resp => "login success", - cmd => "login"}, State), - if is_map(ReqData) -> {#{accountName=>State#state.user}, State#state{user_id = UserId}}; - true -> - reply(From, #{login => maps:merge(Reply0, #{accountName=>State#state.user})}, self()), - State#state{user_id = UserId} + {_SKey, AccountId, EncHash} -> + ?Info("Login Result SKey:~p AccountId:~p EncHash:~p",[_SKey,AccountId,EncHash]), + ok = dderl_dal:create_check_avatar_channel(AccountId), + imem_enc_mnesia:put_enc_hash(EncHash), + act_log(From, ?LOGIN_CONNECT + , #{src=>SrcIp, userId=>AccountId, cmd_resp=>"login success", cmd=>"login"} + , State), + if + is_map(ReqData) -> + {#{accountName=>State#state.user}, State#state{user_id = AccountId}}; + true -> + reply(From + , #{login => maps:merge(Reply0, #{accountName=>State#state.user})} + , self() + ), + State#state{user_id = AccountId} end end. diff --git a/src/dperl/dperl.hrl b/src/dperl/dperl.hrl index 03f08875..97fc8942 100644 --- a/src/dperl/dperl.hrl +++ b/src/dperl/dperl.hrl @@ -2,7 +2,8 @@ -define(_dperl_HRL_, true). -define(LOG_TAG, "_dperl_"). --include_lib("dderl/src/dderl.hrl"). + +-include("../dderl.hrl"). % -include_lib("dderl/src/dderl.hrl"). 
-type plan() :: at_most_once|at_least_once|on_all_nodes. diff --git a/src/dperl/jobs/dperl_file_copy.erl b/src/dperl/jobs/dperl_file_copy.erl index c05aa0a8..4cb12f59 100644 --- a/src/dperl/jobs/dperl_file_copy.erl +++ b/src/dperl/jobs/dperl_file_copy.erl @@ -1,6 +1,6 @@ -module(dperl_file_copy). --include_lib("dperl/dperl.hrl"). +-include_lib("../dperl.hrl"). -behavior(dperl_worker). diff --git a/src/dperl/jobs/dperl_office_365.erl b/src/dperl/jobs/dperl_office_365.erl index fc32e41e..938ca86d 100644 --- a/src/dperl/jobs/dperl_office_365.erl +++ b/src/dperl/jobs/dperl_office_365.erl @@ -1,36 +1,92 @@ -module(dperl_office_365). --include_lib("dperl/dperl.hrl"). +-include_lib("../dperl.hrl"). -behavior(dperl_worker). -behavior(dperl_strategy_scr). +-define(AUTH_CONFIG(__JOB_NAME), + ?GET_CONFIG(office365AuthConfig,[__JOB_NAME], + #{auth_url =>"https://login.microsoftonline.com/common/oauth2/v2.0/authorize?response_type=code&response_mode=query", + client_id => "12345", redirect_uri => "https://localhost:8443/dderl/", client_secret => "12345", grant_type => "authorization_code", + token_url => "https://login.microsoftonline.com/common/oauth2/v2.0/token", + scope => "offline_access https://graph.microsoft.com/people.read"}, + "Office 365 (Graph API) auth config")). + +-define(KEY_PREFIX(__JOB_NAME), + ?GET_CONFIG(keyPrefix, [__JOB_NAME], ["people","Office365"], + "Default KeyPrefix for Office365 data") + ). + % dperl_worker exports --export([init/1, terminate/2, handle_call/3, handle_cast/2, handle_info/2, - get_status/1, init_state/1]). +-export([ init/1 + , terminate/2 + , handle_call/3 + , handle_cast/2 + , handle_info/2 + , get_status/1 + , init_state/1 + ]). 
% contacts graph api % https://docs.microsoft.com/en-us/graph/api/resources/contact?view=graph-rest-1.0 --record(state, {name, channel, is_connected = true, access_token, api_url, - contacts = [], key_prefix, fetch_url, cl_contacts = [], - is_cleanup_finished = true, push_channel, type = pull, - audit_start_time = {0,0}, first_sync = true, username}). +-record(state, { name + , channel + , is_connected = true + , access_token + , api_url + , contacts = [] + , key_prefix + , fetch_url + , cl_contacts = [] + , is_cleanup_finished = true + , push_channel + , type = pull + , audit_start_time = {0,0} + , first_sync = true + , accountId + }). % dperl_strategy_scr export --export([connect_check_src/1, get_source_events/2, connect_check_dst/1, - do_cleanup/5, do_refresh/2, load_src_after_key/3, load_dst_after_key/3, - fetch_src/2, fetch_dst/2, delete_dst/2, insert_dst/3, - update_dst/3, report_status/3]). +-export([ connect_check_src/1 + , get_source_events/2 + , connect_check_dst/1 + , do_cleanup/5 + , do_refresh/2 + , load_src_after_key/3 + , load_dst_after_key/3 + , fetch_src/2 + , fetch_dst/2 + , delete_dst/2 + , insert_dst/3 + , update_dst/3 + , report_status/3 + ]). + +% dderl_oauth exports +-export([ get_auth_config/0 + , get_auth_config/1 + , get_key_prefix/0 + , get_key_prefix/1 + ]). + +get_auth_config() -> ?AUTH_CONFIG(<<>>). + +get_auth_config(JobName) -> ?AUTH_CONFIG(JobName). + +get_key_prefix() -> ?KEY_PREFIX(<<>>). + +get_key_prefix(JobName) -> ?KEY_PREFIX(JobName). 
connect_check_src(#state{is_connected = true} = State) -> {ok, State}; -connect_check_src(#state{is_connected = false, username = Username} = State) -> +connect_check_src(#state{is_connected=false, accountId=AccountId, key_prefix=KeyPrefix} = State) -> ?JTrace("Refreshing access token"), - case dderl_oauth:refresh_access_token(Username, ?OFFICE365) of + case dderl_oauth:refresh_access_token(AccountId, KeyPrefix, ?SYNC_OFFICE365) of {ok, AccessToken} -> ?Info("new access token fetched"), - {ok, State#state{access_token = AccessToken, is_connected = true}}; + {ok, State#state{access_token=AccessToken, is_connected=true}}; {error, Error} -> ?JError("Unexpected response : ~p", [Error]), {error, Error, State} @@ -184,20 +240,20 @@ init({#dperlJob{name=Name, srcArgs = #{api_url := ApiUrl}, args = Args, undefined -> ?JError("Encryption hash is not avaialable"), {stop, badarg}; - {Username, EncHash} -> - ?JInfo("Starting with ~p's enchash...", [Username]), + {AccountId, EncHash} -> + ?JInfo("Starting with ~p's enchash...", [AccountId]), imem_enc_mnesia:put_enc_hash(EncHash), - case dderl_oauth:get_token_info(Username, ?OFFICE365) of + KeyPrefix = maps:get(key_prefix, DstArgs, get_key_prefix(Name)), + case dderl_oauth:get_token_info(AccountId, KeyPrefix, ?SYNC_OFFICE365) of #{<<"access_token">> := AccessToken} -> ChannelBin = dperl_dal:to_binary(Channel), PChannelBin = dperl_dal:to_binary(PChannel), - KeyPrefix = maps:get(key_prefix, DstArgs, []), Type = maps:get(type, Args, pull), dperl_dal:create_check_channel(ChannelBin), dperl_dal:create_check_channel(PChannelBin), {ok, State#state{channel = ChannelBin, name = Name, api_url = ApiUrl, key_prefix = KeyPrefix, access_token = AccessToken, - push_channel = PChannelBin, type = Type, username = Username}}; + push_channel = PChannelBin, type = Type, accountId = AccountId}}; _ -> ?JError("Access token not found"), {stop, badarg} diff --git a/src/dperl/jobs/dperl_ouraring_crawl.erl b/src/dperl/jobs/dperl_ouraring_crawl.erl index 
a4f06513..69cc329a 100644 --- a/src/dperl/jobs/dperl_ouraring_crawl.erl +++ b/src/dperl/jobs/dperl_ouraring_crawl.erl @@ -1,36 +1,89 @@ -module(dperl_ouraring_crawl). --include_lib("dperl/dperl.hrl"). +-include_lib("../dperl.hrl"). -behavior(dperl_worker). -behavior(dperl_strategy_scr). +-define(AUTH_CONFIG(__JOB_NAME), + ?GET_CONFIG(ouraRingAuthConfig,[__JOB_NAME], + #{auth_url =>"https://cloud.ouraring.com/oauth/authorize?response_type=code", + client_id => "12345", redirect_uri => "https://localhost:8443/dderl/", + client_secret => "12345", grant_type => "authorization_code", + token_url => "https://cloud.ouraring.com/oauth/token", + scope => "email personal daily"}, + "Oura Ring auth config")). + +-define(KEY_PREFIX(__JOB_NAME), + ?GET_CONFIG(keyPrefix, [__JOB_NAME], ["healthDevice","OuraRing"], + "Default KeyPrefix for Oura Ring data") + ). + -define(SHIFT_DAYS(__JOB_NAME), ?GET_CONFIG(daysToBeShiftedAtStart, [__JOB_NAME], 100, "Days to be shifted backwards for starting the job") ). -% dperl_worker exports --export([init/1, terminate/2, handle_call/3, handle_cast/2, handle_info/2, - get_status/1, init_state/1]). +-record(state, { name + , channel + , is_connected = true + , access_token + , api_url + , last_sleep_day + , last_activity_day + , last_readiness_day + , infos = [] + , key_prefix + , accountId + }). --record(state, {name, channel, is_connected = true, access_token, api_url, - last_sleep_day, last_activity_day, last_readiness_day, - infos = [], key_prefix, username}). +% dperl_worker exports +-export([ init/1 + , terminate/2 + , handle_call/3 + , handle_cast/2 + , handle_info/2 + , get_status/1 + , init_state/1 + ]). + +% dderl_oauth exports +-export([ get_auth_config/0 + , get_auth_config/1 + , get_key_prefix/0 + , get_key_prefix/1 + ]). 
% dperl_strategy_scr export --export([connect_check_src/1, get_source_events/2, connect_check_dst/1, - do_cleanup/2, do_refresh/2, fetch_src/2, fetch_dst/2, delete_dst/2, - insert_dst/3, update_dst/3, report_status/3]). +-export([ connect_check_src/1 + , get_source_events/2 + , connect_check_dst/1 + , do_cleanup/2 + , do_refresh/2 + , fetch_src/2 + , fetch_dst/2 + , delete_dst/2 + , insert_dst/3 + , update_dst/3 + , report_status/3 + ]). + +get_auth_config() -> ?AUTH_CONFIG(<<>>). + +get_auth_config(JobName) -> ?AUTH_CONFIG(JobName). + +get_key_prefix() -> ?KEY_PREFIX(<<>>). + +get_key_prefix(JobName) -> ?KEY_PREFIX(JobName). connect_check_src(#state{is_connected = true} = State) -> {ok, State}; -connect_check_src(#state{is_connected = false, username = Username} = State) -> +connect_check_src(#state{is_connected=false, accountId=AccountId, key_prefix=KeyPrefix} = State) -> ?JTrace("Refreshing access token"), - case dderl_oauth:refresh_access_token(Username, ?OURARING) of + case dderl_oauth:refresh_access_token(AccountId, KeyPrefix, ?SYNC_OURARING) of {ok, AccessToken} -> ?Info("new access token fetched"), - {ok, State#state{access_token = AccessToken, is_connected = true}}; + {ok, State#state{access_token=AccessToken, is_connected=true}}; {error, Error} -> ?JError("Unexpected response : ~p", [Error]), {error, Error, State} @@ -104,18 +157,18 @@ init({#dperlJob{name=Name, dstArgs = #{channel := Channel} = DstArgs, undefined -> ?JError("Encryption hash is not avaialable"), {stop, badarg}; - {Username, EncHash} -> - ?JInfo("Starting with ~p's enchash...", [Username]), + {AccountId, EncHash} -> + ?JInfo("Starting with ~p's enchash...", [AccountId]), imem_enc_mnesia:put_enc_hash(EncHash), - case dderl_oauth:get_token_info(Username, ?OURARING) of + KeyPrefix = maps:get(key_prefix, DstArgs, get_key_prefix(Name)), + case dderl_oauth:get_token_info(AccountId, KeyPrefix, ?SYNC_OURARING) of #{<<"access_token">> := AccessToken} -> ChannelBin = dperl_dal:to_binary(Channel), - 
KeyPrefix = maps:get(key_prefix, DstArgs, []), dperl_dal:create_check_channel(ChannelBin), - {ok, State#state{channel = ChannelBin, api_url = ApiUrl, username = Username, - key_prefix = KeyPrefix, access_token = AccessToken}}; + {ok, State#state{channel=ChannelBin, api_url=ApiUrl, accountId=AccountId, + key_prefix=KeyPrefix, access_token=AccessToken}}; _ -> - ?JError("Access token not found"), + ?JError("Access token not found for KeyPrefix ~p",[KeyPrefix]), {stop, badarg} end end; diff --git a/src/imem_adapter.erl b/src/imem_adapter.erl index 8816674e..cb334460 100644 --- a/src/imem_adapter.erl +++ b/src/imem_adapter.erl @@ -810,7 +810,7 @@ process_query(Query, Connection, Params, SessPid) -> , update_cursor_execute_funs = imem_adapter_funs:update_cursor_execute(Connection, StmtRefs) }, SessPid), erlimem_session:add_stmt_fsm(Connection, StmtRefs, {dderl_fsm, StmtFsm}), - EncHash = erlimem_session:run_cmd(Connection, get_enc_hash, []), + EncHash = imem_enc_mnesia:get_enc_hash(), dderl_fsm:put_enc_hash(StmtFsm, EncHash), ?Debug("StmtRslt ~p ~p", [RowCols, SortSpec]), Columns = gen_adapter:build_column_json(lists:reverse(RowCols)), From d8dbca644cbe4892e59c3bb5ee49e2d2daf43e50 Mon Sep 17 00:00:00 2001 From: stoch Date: Fri, 5 Jun 2020 21:05:04 +0200 Subject: [PATCH 53/72] reworking dperl_office_365, WIP --- src/dperl/jobs/dperl_office_365.erl | 337 ++++++++++++++---------- src/dperl/jobs/dperl_ouraring_crawl.erl | 2 +- 2 files changed, 203 insertions(+), 136 deletions(-) diff --git a/src/dperl/jobs/dperl_office_365.erl b/src/dperl/jobs/dperl_office_365.erl index 938ca86d..04a44bf6 100644 --- a/src/dperl/jobs/dperl_office_365.erl +++ b/src/dperl/jobs/dperl_office_365.erl @@ -5,17 +5,42 @@ -behavior(dperl_worker). -behavior(dperl_strategy_scr). 
--define(AUTH_CONFIG(__JOB_NAME), - ?GET_CONFIG(office365AuthConfig,[__JOB_NAME], - #{auth_url =>"https://login.microsoftonline.com/common/oauth2/v2.0/authorize?response_type=code&response_mode=query", - client_id => "12345", redirect_uri => "https://localhost:8443/dderl/", client_secret => "12345", grant_type => "authorization_code", - token_url => "https://login.microsoftonline.com/common/oauth2/v2.0/token", - scope => "offline_access https://graph.microsoft.com/people.read"}, - "Office 365 (Graph API) auth config")). +-define(OAUTH2_CONFIG(__JOB_NAME), + ?GET_CONFIG(oAuth2Config, + [__JOB_NAME], + #{auth_url =>"https://login.microsoftonline.com/common/oauth2/v2.0/authorize?response_type=code&response_mode=query" + ,client_id => "12345" + ,redirect_uri => "https://localhost:8443/dderl/" + ,client_secret => "12345" + ,grant_type => "authorization_code" + ,token_url => "https://login.microsoftonline.com/common/oauth2/v2.0/token" + ,scope => "offline_access https://graph.microsoft.com/people.read" + }, + "Office 365 (Graph API) auth config" + ) + ). -define(KEY_PREFIX(__JOB_NAME), - ?GET_CONFIG(keyPrefix, [__JOB_NAME], ["people","Office365"], - "Default KeyPrefix for Office365 data") + ?GET_CONFIG(keyPrefix, + [__JOB_NAME], + ["contact","Office365"], + "Default KeyPrefix for Office365 data" + ) + ). 
+ +-define(CONTACT_ATTRIBUTES(__JOB_NAME), + ?GET_CONFIG(contactAttributes, + [__JOB_NAME], + [<<"businessPhones">>,<<"mobilePhone">>,<<"title">>,<<"personalNotes">>,<<"parentFolderId">> + ,<<"companyName">>,<<"emailAddresses">>,<<"middleName">>,<<"businessHomePage">>,<<"id">> + ,<<"assistantName">>,<<"department">>,<<"children">>,<<"officeLocation">>,<<"createdDateTime">> + ,<<"profession">>,<<"givenName">>,<<"categories">>,<<"nickName">>,<<"jobTitle">>,<<"yomiGivenName">> + ,<<"changeKey">>,<<"surname">>,<<"imAddresses">>,<<"spouseName">>,<<"yomiSurname">>,<<"businessAddress">> + ,<<"lastModifiedDateTime">>,<<"generation">>,<<"manager">>,<<"initials">>,<<"displayName">> + ,<<"homeAddress">>,<<"otherAddress">>,<<"homePhones">>,<<"fileAs">>,<<"yomiCompanyName">>,<<"birthday">> + ], + "Attributes to be synced for Office365 contact data" + ) ). % dperl_worker exports @@ -33,18 +58,18 @@ -record(state, { name , channel - , is_connected = true - , access_token - , api_url + , isConnected = true + , accessToken + , apiUrl , contacts = [] - , key_prefix - , fetch_url - , cl_contacts = [] - , is_cleanup_finished = true - , push_channel + , keyPrefix + , fetchUrl + , clContacts = [] + , isCleanupFinished = true + , pushChannel , type = pull - , audit_start_time = {0,0} - , first_sync = true + , auditStartTime = {0,0} + , firstSync = true , accountId }). @@ -71,151 +96,200 @@ , get_key_prefix/1 ]). -get_auth_config() -> ?AUTH_CONFIG(<<>>). +get_auth_config() -> ?OAUTH2_CONFIG(<<>>). -get_auth_config(JobName) -> ?AUTH_CONFIG(JobName). +get_auth_config(JobName) -> ?OAUTH2_CONFIG(JobName). get_key_prefix() -> ?KEY_PREFIX(<<>>). get_key_prefix(JobName) -> ?KEY_PREFIX(JobName). -connect_check_src(#state{is_connected = true} = State) -> +% determine the contact id (last piece of cekey) from ckey or from the remote id +% this id is a string representing a hash of the remote id +-spec local_contact_id(list()|binary()) -> string(). 
+local_contact_id(Key) when is_list(Key) -> + lists:last(Key); +local_contact_id(Bin) when is_binary(Bin) -> + io_lib:format("~.36B",[erlang:phash2(Bin)]). + +% convert list of remote values (already maps) to list of {Key,RemoteId,RemoteValue} triples +% which serves as a lookup buffer of the complete remote state, avoiding sorting issues +format_remote_values_to_kv(Values, KeyPrefix, JobName) -> + format_remote_values_to_kv(Values, KeyPrefix, JobName, []). + +format_remote_values_to_kv([], _KeyPrefix, _JobName, Acc) -> Acc; +format_remote_values_to_kv([Value|Values], KeyPrefix, JobName, Acc) -> + #{<<"id">> := RemoteId} = Value, + Key = KeyPrefix ++ [local_contact_id(RemoteId)], + format_remote_values_to_kv(Values, KeyPrefix, JobName, [{Key,RemoteId,format_value(Value, JobName)}|Acc]). + +% format remote or local value by projecting it down to configured list of synced attributes +format_value(Value, JobName) when is_map(Value) -> maps:with(?CONTACT_ATTRIBUTES(JobName), Value). + +connect_check_src(#state{isConnected=true} = State) -> {ok, State}; -connect_check_src(#state{is_connected=false, accountId=AccountId, key_prefix=KeyPrefix} = State) -> +connect_check_src(#state{isConnected=false, accountId=AccountId, keyPrefix=KeyPrefix} = State) -> ?JTrace("Refreshing access token"), - case dderl_oauth:refresh_access_token(AccountId, KeyPrefix, ?SYNC_OFFICE365) of + case dderl_oauth:refresh_accessToken(AccountId, KeyPrefix, ?SYNC_OFFICE365) of {ok, AccessToken} -> ?Info("new access token fetched"), - {ok, State#state{access_token=AccessToken, is_connected=true}}; + {ok, State#state{accessToken=AccessToken, isConnected=true}}; {error, Error} -> ?JError("Unexpected response : ~p", [Error]), {error, Error, State} end. 
-get_source_events(#state{audit_start_time = LastStartTime, type = push, - push_channel = PChannel} = State, BulkSize) -> - case dperl_dal:read_audit_keys(PChannel, LastStartTime, BulkSize) of +get_source_events(#state{auditStartTime=LastStartTime, type=push, + pushChannel=PushChannel, firstSync=FirstSync} = State, BulkSize) -> + case dperl_dal:read_audit_keys(PushChannel, LastStartTime, BulkSize) of {LastStartTime, LastStartTime, []} -> - if State#state.first_sync == true -> + if + FirstSync == true -> ?JInfo("Audit rollup is complete"), - {ok, sync_complete, State#state{first_sync = false}}; - true -> {ok, sync_complete, State} + {ok, sync_complete, State#state{firstSync=false}}; + true -> + {ok, sync_complete, State} end; {_StartTime, NextStartTime, []} -> - {ok, [], State#state{audit_start_time = NextStartTime}}; + {ok, [], State#state{auditStartTime=NextStartTime}}; {_StartTime, NextStartTime, Keys} -> UniqueKeys = lists:delete(undefined, lists:usort(Keys)), - {ok, UniqueKeys, State#state{audit_start_time = NextStartTime}} + {ok, UniqueKeys, State#state{auditStartTime=NextStartTime}} end; -get_source_events(#state{contacts = []} = State, _BulkSize) -> +get_source_events(#state{contacts=[]} = State, _BulkSize) -> {ok, sync_complete, State}; -get_source_events(#state{contacts = Contacts} = State, _BulkSize) -> - {ok, Contacts, State#state{contacts = []}}. +get_source_events(#state{contacts=Contacts} = State, _BulkSize) -> + {ok, Contacts, State#state{contacts=[]}}. -connect_check_dst(State) -> {ok, State}. +connect_check_dst(State) -> {ok, State}. % Question: Why defaulted for push destination? do_refresh(_State, _BulkSize) -> {error, cleanup_only}. 
-fetch_src(Key, #state{type = push} = State) -> - dperl_dal:read_channel(State#state.push_channel, Key); -fetch_src(Key, #state{cl_contacts = Contacts, type = pull}) -> - case lists:keyfind(Key, 1, Contacts) of - {Key, Contact} -> Contact; +fetch_src(Key, #state{pushChannel=PushChannel, type=push}) -> + dperl_dal:read_channel(PushChannel, Key); +fetch_src(Key, #state{clContacts=ClContacts, type=pull}) -> + case lists:keyfind(Key, 1, ClContacts) of + {Key, _RemoteId, Value} -> Value; false -> ?NOT_FOUND end. -fetch_dst(Key, #state{type = push, api_url = ApiUrl} = State) -> - Id = Key -- State#state.key_prefix, - ContactUrl = erlang:iolist_to_binary([ApiUrl, Id]), - case exec_req(ContactUrl, State#state.access_token) of - #{<<"id">> := _} = Contact -> - format_contact(Contact); - _ -> ?NOT_FOUND - end; -fetch_dst(Key, State) -> - dperl_dal:read_channel(State#state.channel, Key). - -insert_dst(Key, Val, #state{type = push, api_url = ApiUrl} = State) -> - case exec_req(ApiUrl, State#state.access_token, Val, post) of - #{<<"id">> := Id} = Contact -> - NewKey = State#state.key_prefix ++ [binary_to_list(Id)], - ContactBin = imem_json:encode(format_contact(Contact)), - dperl_dal:remove_from_channel(State#state.push_channel, Key), - dperl_dal:write_channel(State#state.channel, NewKey, ContactBin), - dperl_dal:write_channel(State#state.push_channel, NewKey, ContactBin), +fetch_dst(Key, #state{name=Name, clContacts=ClContacts, type=push, + apiUrl=ApiUrl, accessToken=AccessToken} = State) -> + case lists:keyfind(Key, 1, ClContacts) of + {Key, RemoteId, _Value} -> + ContactUrl = erlang:iolist_to_binary([ApiUrl, RemoteId]), + case exec_req(ContactUrl, AccessToken) of + #{<<"id">> := _} = RValue -> format_value(RValue, Name); + {error, unauthorized} -> reconnect_exec(State, fetch_dst, [Key]); + {error, Error} -> {error, Error}; + _ -> ?NOT_FOUND + end; + false -> + ?NOT_FOUND + end; +fetch_dst(Key, #state{channel=Channel}) -> + dperl_dal:read_channel(Channel, Key). 
+ +insert_dst(Key, Value, #state{name=Name, channel=Channel, pushChannel=PushChannel, type=push, + keyPrefix=KeyPrefix, apiUrl=ApiUrl, accessToken=AccessToken} = State) -> + case exec_req(ApiUrl, AccessToken, Value, post) of + #{<<"id">> := Id} = RemoteValue -> + NewKey = KeyPrefix ++ [local_contact_id(Id)], + FormRemote = format_value(RemoteValue, Name), + PushValue = dperl_dal:read_channel(PushChannel, Key), + MergeValue = maps:merge(PushValue, FormRemote), + MergedBin = imem_json:encode(MergeValue), + dperl_dal:remove_from_channel(PushChannel, Key), + dperl_dal:write_channel(Channel, NewKey, MergedBin), + dperl_dal:write_channel(PushChannel, NewKey, MergedBin), {false, State}; {error, unauthorized} -> - reconnect_exec(State, insert_dst, [Key, Val]); + reconnect_exec(State, insert_dst, [Key, Value]); {error, Error} -> {error, Error} end; -insert_dst(Key, Val, State) -> - update_dst(Key, Val, State). - -delete_dst(Key, #state{type = push, api_url = ApiUrl} = State) -> - Id = Key -- State#state.key_prefix, - ContactUrl = erlang:iolist_to_binary([ApiUrl, Id]), - case exec_req(ContactUrl, State#state.access_token, #{}, delete) of - ok -> - dperl_dal:remove_from_channel(State#state.channel, Key), - {false, State}; - {error, unauthorized} -> - reconnect_exec(State, delete_dst, [Key]); - Error -> - Error +insert_dst(Key, Value, State) -> + update_dst(Key, Value, State). 
+ +delete_dst(Key, #state{channel=Channel, type=push, clContacts=ClContacts, + apiUrl=ApiUrl, accessToken=AccessToken} = State) -> + case lists:keyfind(Key, 1, ClContacts) of + {Key, RemoteId, _Value} -> + ContactUrl = erlang:iolist_to_binary([ApiUrl, RemoteId]), + case exec_req(ContactUrl, AccessToken, #{}, delete) of + ok -> + dperl_dal:remove_from_channel(Channel, Key), + {false, State}; + {error, unauthorized} -> + reconnect_exec(State, delete_dst, [Key]); + Error -> + Error + end; + false -> + {false, State} end; -delete_dst(Key, #state{channel = Channel} = State) -> +delete_dst(Key, #state{channel=Channel, pushChannel=PushChannel} = State) -> dperl_dal:remove_from_channel(Channel, Key), - dperl_dal:remove_from_channel(State#state.push_channel, Key), + dperl_dal:remove_from_channel(PushChannel, Key), {false, State}. -update_dst(Key, Val, #state{type = push, api_url = ApiUrl} = State) -> - Id = Key -- State#state.key_prefix, - ContactUrl = erlang:iolist_to_binary([ApiUrl, Id]), - case exec_req(ContactUrl, State#state.access_token, Val, patch) of - #{<<"id">> := _} = Contact -> - ContactBin = imem_json:encode(format_contact(Contact)), - dperl_dal:write_channel(State#state.channel, Key, ContactBin), - dperl_dal:write_channel(State#state.push_channel, Key, ContactBin), - {false, State}; - {error, unauthorized} -> - reconnect_exec(State, update_dst, [Key, Val]); - {error, Error} -> - {error, Error} +-spec update_dst(Key::list(), Value::map(), #state{}) -> tuple(). 
+update_dst(Key, Value, #state{name=Name, channel=Channel, pushChannel=PushChannel, type=push, + clContacts=ClContacts, apiUrl=ApiUrl, accessToken=AccessToken} = State) -> + case lists:keyfind(Key, 1, ClContacts) of + {Key, RemoteId, _Value} -> + ContactUrl = erlang:iolist_to_binary([ApiUrl, RemoteId]), + case exec_req(ContactUrl, AccessToken, Value, patch) of + #{<<"id">> := _} = RemoteValue -> + FormRemote = format_value(RemoteValue, Name), + OldValue = dperl_dal:read_channel(PushChannel, Key), + MergeValue = maps:merge(OldValue, FormRemote), + MergedBin = imem_json:encode(MergeValue), + dperl_dal:remove_from_channel(PushChannel, Key), + dperl_dal:write_channel(Channel, Key, MergedBin), + dperl_dal:write_channel(PushChannel, Key, MergedBin), + {false, State}; + {error, unauthorized} -> + reconnect_exec(State, update_dst, [Key, Value]); + {error, Error} -> + {error, Error} + end; + false -> + {false, State} end; -update_dst(Key, Val, #state{channel = Channel} = State) when is_binary(Val) -> - dperl_dal:write_channel(Channel, Key, Val), - dperl_dal:write_channel(State#state.push_channel, Key, Val), - {false, State}; -update_dst(Key, Val, State) -> - update_dst(Key, imem_json:encode(Val), State). +update_dst(Key, Value, #state{channel=Channel, pushChannel=PushChannel} = State) when is_map(Value) -> + OldValue = dperl_dal:read_channel(Channel, Key), + MergeValue = maps:merge(OldValue, Value), + MergedBin = imem_json:encode(MergeValue), + dperl_dal:write_channel(Channel, Key, MergedBin), + dperl_dal:write_channel(PushChannel, Key, MergedBin), + {false, State}. report_status(_Key, _Status, _State) -> no_op. load_dst_after_key(CurKey, BlkCount, #state{channel = Channel}) -> dperl_dal:read_gt(Channel, CurKey, BlkCount). 
-load_src_after_key(CurKey, BlkCount, #state{fetch_url = undefined} = State) -> - % https://graph.microsoft.com/v1.0/me/contacts/?$top=100&$select=displayName&orderby=displayName +load_src_after_key(CurKey, BlkCount, #state{type=pull, fetchUrl=undefined, apiUrl=ApiUrl} = State) -> UrlParams = dperl_dal:url_enc_params(#{"$top" => integer_to_list(BlkCount)}), - ContactsUrl = erlang:iolist_to_binary([State#state.api_url, "?", UrlParams]), - load_src_after_key(CurKey, BlkCount, State#state{fetch_url = ContactsUrl}); -load_src_after_key(CurKey, BlkCount, #state{is_cleanup_finished = true, key_prefix = KeyPrefix, - access_token = AccessToken, fetch_url = FetchUrl} = State) -> + ContactsUrl = erlang:iolist_to_binary([ApiUrl, "?", UrlParams]), + load_src_after_key(CurKey, BlkCount, State#state{fetchUrl=ContactsUrl}); +load_src_after_key(CurKey, BlkCount, #state{name=Name, type=pull, isCleanupFinished=true, + keyPrefix=KeyPrefix, accessToken=AccessToken, fetchUrl=FetchUrl} = State) -> % fetch all contacts - case fetch_all_contacts(FetchUrl, AccessToken, KeyPrefix) of + case fetch_all_contacts(FetchUrl, AccessToken, KeyPrefix, Name) of {ok, Contacts} -> - load_src_after_key(CurKey, BlkCount, State#state{cl_contacts = Contacts, is_cleanup_finished = false}); + load_src_after_key(CurKey, BlkCount, State#state{clContacts=Contacts, isCleanupFinished=false}); {error, unauthorized} -> reconnect_exec(State, load_src_after_key, [CurKey, BlkCount]); {error, Error} -> {error, Error, State} end; -load_src_after_key(CurKey, BlkCount, #state{cl_contacts = Contacts} = State) -> +load_src_after_key(CurKey, BlkCount, #state{clContacts=Contacts} = State) -> {ok, get_contacts_gt(CurKey, BlkCount, Contacts), State}. 
reconnect_exec(State, Fun, Args) -> - case connect_check_src(State#state{is_connected = false}) of + case connect_check_src(State#state{isConnected = false}) of {ok, State1} -> erlang:apply(?MODULE, Fun, Args ++ [State1]); {error, Error, State1} -> @@ -226,16 +300,16 @@ do_cleanup(_Deletes, _Inserts, _Diffs, _IsFinished, #state{type = push}) -> {error, <<"cleanup only for pull job">>}; do_cleanup(Deletes, Inserts, Diffs, IsFinished, State) -> NewState = State#state{contacts = Inserts ++ Diffs ++ Deletes}, - if IsFinished -> {ok, finish, NewState#state{is_cleanup_finished = true}}; - true -> {ok, NewState} + if IsFinished -> {ok, finish, NewState#state{isCleanupFinished=true}}; + true -> {ok, NewState} end. get_status(#state{}) -> #{}. init_state(_) -> #state{}. -init({#dperlJob{name=Name, srcArgs = #{api_url := ApiUrl}, args = Args, - dstArgs = #{channel := Channel, push_channel := PChannel} = DstArgs}, State}) -> +init({#dperlJob{name=Name, srcArgs = #{apiUrl := ApiUrl}, args = Args, + dstArgs = #{channel := Channel, pushChannel := PChannel} = DstArgs}, State}) -> case dperl_auth_cache:get_enc_hash(Name) of undefined -> ?JError("Encryption hash is not avaialable"), @@ -243,17 +317,17 @@ init({#dperlJob{name=Name, srcArgs = #{api_url := ApiUrl}, args = Args, {AccountId, EncHash} -> ?JInfo("Starting with ~p's enchash...", [AccountId]), imem_enc_mnesia:put_enc_hash(EncHash), - KeyPrefix = maps:get(key_prefix, DstArgs, get_key_prefix(Name)), + KeyPrefix = maps:get(keyPrefix, DstArgs, get_key_prefix(Name)), case dderl_oauth:get_token_info(AccountId, KeyPrefix, ?SYNC_OFFICE365) of - #{<<"access_token">> := AccessToken} -> + #{<<"accessToken">> := AccessToken} -> ChannelBin = dperl_dal:to_binary(Channel), PChannelBin = dperl_dal:to_binary(PChannel), Type = maps:get(type, Args, pull), dperl_dal:create_check_channel(ChannelBin), dperl_dal:create_check_channel(PChannelBin), - {ok, State#state{channel = ChannelBin, name = Name, api_url = ApiUrl, - key_prefix = KeyPrefix, 
access_token = AccessToken, - push_channel = PChannelBin, type = Type, accountId = AccountId}}; + {ok, State#state{channel = ChannelBin, name = Name, apiUrl = ApiUrl, + keyPrefix = KeyPrefix, accessToken = AccessToken, + pushChannel = PChannelBin, type = Type, accountId = AccountId}}; _ -> ?JError("Access token not found"), {stop, badarg} @@ -281,28 +355,19 @@ terminate(Reason, _State) -> %% private functions -format_contacts([], _) -> []; -format_contacts([#{<<"id">> := IdBin} = Contact | Contacts], KeyPrefix) -> - Id = binary_to_list(IdBin), - Key = KeyPrefix ++ [Id], - [{Key, format_contact(Contact)} | format_contacts(Contacts, KeyPrefix)]. - -format_contact(Contact) -> - maps:without([<<"@odata.etag">>, <<"@odata.context">>], Contact). - -fetch_all_contacts(Url, AccessToken, KeyPrefix) -> - fetch_all_contacts(Url, AccessToken, KeyPrefix, []). +fetch_all_contacts(Url, AccessToken, KeyPrefix, JobName) -> + fetch_all_contacts(Url, AccessToken, KeyPrefix, JobName, []). -fetch_all_contacts(Url, AccessToken, KeyPrefix, AccContacts) -> +fetch_all_contacts(Url, AccessToken, KeyPrefix, JobName, AccContacts) -> ?JTrace("Fetching contacts with url : ~s", [Url]), ?JTrace("Fetched contacts : ~p", [length(AccContacts)]), case exec_req(Url, AccessToken) of - #{<<"@odata.nextLink">> := NextUrl, <<"value">> := Contacts} -> - FContacts = format_contacts(Contacts, KeyPrefix), - fetch_all_contacts(NextUrl, AccessToken, KeyPrefix, lists:append(FContacts, AccContacts)); - #{<<"value">> := Contacts} -> - FContacts = format_contacts(Contacts, KeyPrefix), - {ok, lists:keysort(1, lists:append(FContacts, AccContacts))}; + #{<<"@odata.nextLink">> := NextUrl, <<"value">> := MoreContacts} -> + Contacts = format_remote_values_to_kv(MoreContacts, KeyPrefix, JobName), + fetch_all_contacts(NextUrl, AccessToken, KeyPrefix, lists:append(Contacts, AccContacts)); + #{<<"value">> := MoreContacts} -> + Contacts = format_remote_values_to_kv(MoreContacts, KeyPrefix, JobName), + {ok, 
lists:append(Contacts, AccContacts)}; {error, Error} -> {error, Error} end. @@ -318,6 +383,7 @@ get_contacts_gt(CurKey, BlkCount, [{Key, _} | Contacts], Acc) when Key =< CurKey get_contacts_gt(CurKey, BlkCount, [Contact | Contacts], Acc) -> get_contacts_gt(CurKey, BlkCount, Contacts, [Contact | Acc]). +-spec exec_req(Url::binary()|string(), AccessToken::binary()) -> tuple(). exec_req(Url, AccessToken) when is_binary(Url) -> exec_req(binary_to_list(Url), AccessToken); exec_req(Url, AccessToken) -> @@ -331,7 +397,8 @@ exec_req(Url, AccessToken) -> {error, Error} end. -exec_req(Url, AccessToken, Body, Method) when is_binary(Url) -> +-spec exec_req(Url::binary()|string(), AccessToken::binary(), Body::map(), Method::atom()) -> tuple(). +exec_req(Url, AccessToken, Body, Method) when is_binary(Url), is_map(Body) -> exec_req(binary_to_list(Url), AccessToken, Body, Method); exec_req(Url, AccessToken, Body, Method) -> AuthHeader = [{"Authorization", "Bearer " ++ binary_to_list(AccessToken)}], diff --git a/src/dperl/jobs/dperl_ouraring_crawl.erl b/src/dperl/jobs/dperl_ouraring_crawl.erl index 69cc329a..c0de1b91 100644 --- a/src/dperl/jobs/dperl_ouraring_crawl.erl +++ b/src/dperl/jobs/dperl_ouraring_crawl.erl @@ -6,7 +6,7 @@ -behavior(dperl_strategy_scr). 
-define(AUTH_CONFIG(__JOB_NAME), - ?GET_CONFIG(ouraRingAuthConfig,[__JOB_NAME], + ?GET_CONFIG(oAuth2Config,[__JOB_NAME], #{auth_url =>"https://cloud.ouraring.com/oauth/authorize?response_type=code", client_id => "12345", redirect_uri => "https://localhost:8443/dderl/", client_secret => "12345", grant_type => "authorization_code", From b48315d45cb862c38845abf7fa220f1d56c897cd Mon Sep 17 00:00:00 2001 From: stoch Date: Sun, 7 Jun 2020 20:33:54 +0200 Subject: [PATCH 54/72] contact sync WIP, still remote state as triples, deletes outside prefix --- src/dderl_dal.erl | 3 ++- src/dderl_session.erl | 2 +- src/dperl/jobs/dperl_office_365.erl | 18 ++++++++++-------- 3 files changed, 13 insertions(+), 10 deletions(-) diff --git a/src/dderl_dal.erl b/src/dderl_dal.erl index b962596a..afa0e302 100644 --- a/src/dderl_dal.erl +++ b/src/dderl_dal.erl @@ -876,7 +876,8 @@ write_to_avatar_channel(AccountId, Key, Value) -> read_from_avatar_channel(AccountId, Key) -> case imem_dal_skvh:read(AccountId, avatar_channel_name(AccountId), [Key]) of [#{cvalue := CValue}] when is_binary(CValue) -> imem_json:decode(CValue, [return_maps]); - [#{cvalue := CValue}] when is_map(CValue)-> CValue + [#{cvalue := CValue}] when is_map(CValue)-> CValue; + [] -> [] end. -spec exec_is_proxy_fun(reference(), map()) -> boolean(). 
diff --git a/src/dderl_session.erl b/src/dderl_session.erl index e65b4fb4..f223209f 100644 --- a/src/dderl_session.erl +++ b/src/dderl_session.erl @@ -865,7 +865,7 @@ login(ReqData, From, SrcIp, State) -> State end; {_SKey, AccountId, EncHash} -> - ?Info("Login Result SKey:~p AccountId:~p EncHash:~p",[_SKey,AccountId,EncHash]), + %?Info("Login Result SKey:~p AccountId:~p EncHash:~p",[_SKey,AccountId,EncHash]), ok = dderl_dal:create_check_avatar_channel(AccountId), imem_enc_mnesia:put_enc_hash(EncHash), act_log(From, ?LOGIN_CONNECT diff --git a/src/dperl/jobs/dperl_office_365.erl b/src/dperl/jobs/dperl_office_365.erl index 04a44bf6..f6d85d3f 100644 --- a/src/dperl/jobs/dperl_office_365.erl +++ b/src/dperl/jobs/dperl_office_365.erl @@ -61,10 +61,10 @@ , isConnected = true , accessToken , apiUrl - , contacts = [] + , contacts = [] % , keyPrefix , fetchUrl - , clContacts = [] + , clContacts = [] % , isCleanupFinished = true , pushChannel , type = pull @@ -130,7 +130,7 @@ connect_check_src(#state{isConnected=true} = State) -> {ok, State}; connect_check_src(#state{isConnected=false, accountId=AccountId, keyPrefix=KeyPrefix} = State) -> ?JTrace("Refreshing access token"), - case dderl_oauth:refresh_accessToken(AccountId, KeyPrefix, ?SYNC_OFFICE365) of + case dderl_oauth:refresh_access_token(AccountId, KeyPrefix, ?SYNC_OFFICE365) of {ok, AccessToken} -> ?Info("new access token fetched"), {ok, State#state{accessToken=AccessToken, isConnected=true}}; @@ -308,8 +308,8 @@ get_status(#state{}) -> #{}. init_state(_) -> #state{}. 
-init({#dperlJob{name=Name, srcArgs = #{apiUrl := ApiUrl}, args = Args, - dstArgs = #{channel := Channel, pushChannel := PChannel} = DstArgs}, State}) -> +init({#dperlJob{name=Name, srcArgs=#{apiUrl:=ApiUrl}, args=Args, + dstArgs=#{channel:=Channel, pushChannel:=PChannel} = DstArgs}, State}) -> case dperl_auth_cache:get_enc_hash(Name) of undefined -> ?JError("Encryption hash is not avaialable"), @@ -319,7 +319,7 @@ init({#dperlJob{name=Name, srcArgs = #{apiUrl := ApiUrl}, args = Args, imem_enc_mnesia:put_enc_hash(EncHash), KeyPrefix = maps:get(keyPrefix, DstArgs, get_key_prefix(Name)), case dderl_oauth:get_token_info(AccountId, KeyPrefix, ?SYNC_OFFICE365) of - #{<<"accessToken">> := AccessToken} -> + #{<<"access_token">> := AccessToken} -> ChannelBin = dperl_dal:to_binary(Channel), PChannelBin = dperl_dal:to_binary(PChannel), Type = maps:get(type, Args, pull), @@ -329,7 +329,7 @@ init({#dperlJob{name=Name, srcArgs = #{apiUrl := ApiUrl}, args = Args, keyPrefix = KeyPrefix, accessToken = AccessToken, pushChannel = PChannelBin, type = Type, accountId = AccountId}}; _ -> - ?JError("Access token not found"), + ?JError("Access token not found for ~p at ~p", [AccountId, KeyPrefix]), {stop, badarg} end end; @@ -355,6 +355,8 @@ terminate(Reason, _State) -> %% private functions +%% Fetch all remote contacts, create 3-tuple {Key::list(), RemoteId::binary(), RemoteValue::map()) +%% Sort by Key (needed for sync) fetch_all_contacts(Url, AccessToken, KeyPrefix, JobName) -> fetch_all_contacts(Url, AccessToken, KeyPrefix, JobName, []). @@ -367,7 +369,7 @@ fetch_all_contacts(Url, AccessToken, KeyPrefix, JobName, AccContacts) -> fetch_all_contacts(NextUrl, AccessToken, KeyPrefix, lists:append(Contacts, AccContacts)); #{<<"value">> := MoreContacts} -> Contacts = format_remote_values_to_kv(MoreContacts, KeyPrefix, JobName), - {ok, lists:append(Contacts, AccContacts)}; + {ok, lists:keysort(1, lists:append(Contacts, AccContacts))}; {error, Error} -> {error, Error} end. 
From ab6099e5da642740fa385c75e49ef8e6e19f9730 Mon Sep 17 00:00:00 2001 From: stoch Date: Tue, 9 Jun 2020 22:26:19 +0200 Subject: [PATCH 55/72] remove pushChannel and create index on remoteIds --- src/dderl_oauth.erl | 32 ++-- src/dderl_session.erl | 16 +- src/dperl/jobs/dperl_office_365.erl | 252 +++++++++++++++++----------- 3 files changed, 178 insertions(+), 122 deletions(-) diff --git a/src/dderl_oauth.erl b/src/dderl_oauth.erl index 78e0dbff..733a0963 100644 --- a/src/dderl_oauth.erl +++ b/src/dderl_oauth.erl @@ -11,15 +11,15 @@ , refresh_access_token/3 ]). -get_token_info(AccountId, KeyPrefix, _SyncType) -> - dderl_dal:read_from_avatar_channel(AccountId, KeyPrefix ++ [?TOKEN_KEYPART]). +get_token_info(AccountId, TokenPrefix, _SyncType) -> + dderl_dal:read_from_avatar_channel(AccountId, TokenPrefix ++ [?TOKEN_KEYPART]). -set_token_info(AccountId, KeyPrefix, TokenInfo, SyncType) when is_map(TokenInfo) -> - set_token_info(AccountId, KeyPrefix, imem_json:encode(TokenInfo), SyncType); -set_token_info(AccountId, KeyPrefix, TokenInfo, SyncType) when is_list(TokenInfo) -> - set_token_info(AccountId, KeyPrefix, list_to_binary(TokenInfo), SyncType); -set_token_info(AccountId, KeyPrefix, TokenInfo, _SyncType) when is_binary(TokenInfo) -> - dderl_dal:write_to_avatar_channel(AccountId, KeyPrefix ++ [?TOKEN_KEYPART], TokenInfo). +set_token_info(AccountId, TokenPrefix, TokenInfo, SyncType) when is_map(TokenInfo) -> + set_token_info(AccountId, TokenPrefix, imem_json:encode(TokenInfo), SyncType); +set_token_info(AccountId, TokenPrefix, TokenInfo, SyncType) when is_list(TokenInfo) -> + set_token_info(AccountId, TokenPrefix, list_to_binary(TokenInfo), SyncType); +set_token_info(AccountId, TokenPrefix, TokenInfo, _SyncType) when is_binary(TokenInfo) -> + dderl_dal:write_to_avatar_channel(AccountId, TokenPrefix ++ [?TOKEN_KEYPART], TokenInfo). 
get_authorize_url(XSRFToken, AuthConfig, SyncType) -> State = #{xsrfToken => XSRFToken, type => SyncType}, @@ -29,9 +29,9 @@ get_authorize_url(XSRFToken, AuthConfig, SyncType) -> "scope" => {enc, Scope}, "state" => {enc, imem_json:encode(State)}}), erlang:iolist_to_binary([Url, "&", UrlParams]). -get_access_token(AccountId, KeyPrefix, Code, SyncType) -> +get_access_token(AccountId, TokenPrefix, Code, SyncType) -> AuthConfig = try - SyncType:get_auth_config() % ToDo: AuthConfig may depend on JobName or KeyPrefix + SyncType:get_auth_config() % ToDo: AuthConfig may depend on JobName or TokenPrefix catch _:E:S -> ?Error("Finding AuthConfig : ~p ñ~p", [E,S]), @@ -48,7 +48,7 @@ get_access_token(AccountId, KeyPrefix, Code, SyncType) -> ContentType = "application/x-www-form-urlencoded", case httpc:request(post, {TUrl, "", ContentType, Body}, [], []) of {ok, {{_, 200, "OK"}, _, TokenInfo}} -> - set_token_info(AccountId, KeyPrefix, TokenInfo, SyncType), + set_token_info(AccountId, TokenPrefix, TokenInfo, SyncType), ok; {ok, {{_, Code, _}, _, Error}} -> ?Error("Fetching access token : ~p:~p", [Code, Error]), @@ -58,10 +58,10 @@ get_access_token(AccountId, KeyPrefix, Code, SyncType) -> {error, Error} end. 
-refresh_access_token(AccountId, KeyPrefix, SyncType) -> +refresh_access_token(AccountId, TokenPrefix, SyncType) -> #{token_url := TUrl, client_id := ClientId, scope := Scope, client_secret := Secret} = SyncType:get_auth_config(), - #{<<"refresh_token">> := RefreshToken} = get_token_info(AccountId, KeyPrefix, SyncType), + #{<<"refresh_token">> := RefreshToken} = get_token_info(AccountId, TokenPrefix, SyncType), Body = dperl_dal:url_enc_params( #{"client_id" => ClientId, "client_secret" => {enc, Secret}, "scope" => {enc, Scope}, "refresh_token" => RefreshToken, "grant_type" => "refresh_token"}), @@ -69,9 +69,9 @@ refresh_access_token(AccountId, KeyPrefix, SyncType) -> case httpc:request(post, {TUrl, "", ContentType, Body}, [], []) of {ok, {{_, 200, "OK"}, _, TokenBody}} -> TokenInfo = imem_json:decode(list_to_binary(TokenBody), [return_maps]), - set_token_info(AccountId, KeyPrefix, TokenBody, SyncType), - #{<<"access_token">> := AccessToken} = TokenInfo, - {ok, AccessToken}; + set_token_info(AccountId, TokenPrefix, TokenBody, SyncType), + #{<<"access_token">> := Token} = TokenInfo, + {ok, Token}; Error -> {error, Error} end. 
diff --git a/src/dderl_session.erl b/src/dderl_session.erl index f223209f..e34272e6 100644 --- a/src/dderl_session.erl +++ b/src/dderl_session.erl @@ -382,14 +382,14 @@ process_call({[<<"about">>], _ReqData}, _Adapter, From, {SrcIp,_}, State) -> process_call({[<<"office_365_auth_config">>], _ReqData}, _Adapter, From, {SrcIp, _}, State) -> act_log(From, ?CMD_NOARGS, #{src => SrcIp, cmd => "office_365_auth_config"}, State), - AuthConfig = dperl_office_365:get_auth_config(), % ToDo: may depend on JobName or KeyPrefix + AuthConfig = dperl_office_365:get_auth_config(), % ToDo: may depend on JobName or TokenPrefix Url = dderl_oauth:get_authorize_url(State#state.xsrf_token, AuthConfig, ?SYNC_OFFICE365), reply(From, #{<<"office_365_auth_config">> => #{<<"url">> => Url}}, self()), State; process_call({[<<"oura_ring_auth_config">>], _ReqData}, _Adapter, From, {SrcIp, _}, State) -> act_log(From, ?CMD_NOARGS, #{src => SrcIp, cmd => "oura_ring_auth_config"}, State), - AuthConfig = dperl_ouraring_crawl:get_auth_config(), % ToDo: may depend on JobName or KeyPrefix + AuthConfig = dperl_ouraring_crawl:get_auth_config(), % ToDo: may depend on JobName or TokenPrefix ?Info("oura_ring_auth_config ~p",[AuthConfig]), Url = dderl_oauth:get_authorize_url(State#state.xsrf_token, AuthConfig, ?SYNC_OURARING), reply(From, #{<<"oura_ring_auth_config">> => #{<<"url">> => Url}}, self()), @@ -401,16 +401,16 @@ process_call({[<<"oauth2_callback">>], ReqData}, _Adapter, From, {SrcIp, _}, Sta #{<<"code">> := Code, <<"state">> := #{<<"type">> := SyncType}}} = jsx:decode(ReqData, [return_maps]), ?Info("oauth2_callback SyncType: ~p Code: ~p",[SyncType, Code]), % ToDo: Check if this data can this be trusted - {SyncHandler,KeyPrefix} = try + {SyncHandler,TokenPrefix} = try SH = binary_to_existing_atom(SyncType,utf8), - {SH,SH:get_key_prefix()} % ToDo: may depend on JobName or KeyPrefix + {SH,SH:get_auth_token_key_prefix()} % ToDo: may depend on JobName or TokenPrefix catch _:E:_ -> - ?Error("Finding 
KeyPrefix : ~p", [E]), - reply(From, #{<<"oauth2_callback">> => #{<<"error">> => <<"Error finding KeyPrefix">>}}, self()) + ?Error("Finding TokenPrefix : ~p", [E]), + reply(From, #{<<"oauth2_callback">> => #{<<"error">> => <<"Error finding TokenPrefix">>}}, self()) end, - ?Info("oauth2_callback KeyPrefix: ~p",[KeyPrefix]), - case dderl_oauth:get_access_token(State#state.user, KeyPrefix, Code, SyncHandler) of + ?Info("oauth2_callback TokenPrefix: ~p",[TokenPrefix]), + case dderl_oauth:get_access_token(State#state.user, TokenPrefix, Code, SyncHandler) of ok -> reply(From, #{<<"oauth2_callback">> => #{<<"status">> => <<"ok">>}}, self()); {error, Error} -> diff --git a/src/dperl/jobs/dperl_office_365.erl b/src/dperl/jobs/dperl_office_365.erl index f6d85d3f..4da0c550 100644 --- a/src/dperl/jobs/dperl_office_365.erl +++ b/src/dperl/jobs/dperl_office_365.erl @@ -20,29 +20,56 @@ ) ). +-define(OAUTH2_TOKEN_KEY_PREFIX(__JOB_NAME), + ?GET_CONFIG(oAuth2TokenKeyPrefix, + [__JOB_NAME], + ["dperlJob","Office365"], + "Default KeyPrefix for Office365 token cache" + ) + ). + -define(KEY_PREFIX(__JOB_NAME), ?GET_CONFIG(keyPrefix, [__JOB_NAME], ["contact","Office365"], - "Default KeyPrefix for Office365 data" + "Default KeyPrefix for Office365 contact data" + ) + ). + +-define(CONTACT_INDEXID, + ?GET_CONFIG(contactIndexId, + [], + 1, + "Id for index on Office365 contact data" ) ). 
--define(CONTACT_ATTRIBUTES(__JOB_NAME), +-define(CONTENT_ATTRIBUTES(__JOB_NAME), ?GET_CONFIG(contactAttributes, [__JOB_NAME], - [<<"businessPhones">>,<<"mobilePhone">>,<<"title">>,<<"personalNotes">>,<<"parentFolderId">> - ,<<"companyName">>,<<"emailAddresses">>,<<"middleName">>,<<"businessHomePage">>,<<"id">> - ,<<"assistantName">>,<<"department">>,<<"children">>,<<"officeLocation">>,<<"createdDateTime">> - ,<<"profession">>,<<"givenName">>,<<"categories">>,<<"nickName">>,<<"jobTitle">>,<<"yomiGivenName">> - ,<<"changeKey">>,<<"surname">>,<<"imAddresses">>,<<"spouseName">>,<<"yomiSurname">>,<<"businessAddress">> - ,<<"lastModifiedDateTime">>,<<"generation">>,<<"manager">>,<<"initials">>,<<"displayName">> - ,<<"homeAddress">>,<<"otherAddress">>,<<"homePhones">>,<<"fileAs">>,<<"yomiCompanyName">>,<<"birthday">> + [<<"businessPhones">>,<<"mobilePhone">> %,<<"title">> ,<<"personalNotes">> + ,<<"companyName">>,<<"emailAddresses">> % ,<<"middleName">>,<<"businessHomePage">> + ,<<"assistantName">>,<<"department">> % ,<<"children">>,<<"officeLocation">> + ,<<"profession">>,<<"givenName">>,<<"categories">>,<<"jobTitle">> % ,<<"nickName">>,<<"yomiGivenName">> + ,<<"surname">>,<<"imAddresses">>,<<"businessAddress">> % ,<<"spouseName">>,<<"yomiSurname">> + ,<<"manager">> % ,<<"generation">>,<<"initials">>,<<"displayName">> + % ,<<"homeAddress">>,<<"otherAddress">>,<<"homePhones">>,<<"fileAs">>,<<"yomiCompanyName">>,<<"birthday">> ], "Attributes to be synced for Office365 contact data" ) ). +-define(META_ATTRIBUTES(__JOB_NAME), + ?GET_CONFIG(contactAttributes, + [__JOB_NAME], + [<<"id">> + ,<<"lastModifiedDateTime">> + ,<<"changeKey">> %,<<"parentFolderId">>,<<"createdDateTime">> + ], + "Attributes used for Office365 contact change tracking" + ) + ). 
+ % dperl_worker exports -export([ init/1 , terminate/2 @@ -56,21 +83,22 @@ % contacts graph api % https://docs.microsoft.com/en-us/graph/api/resources/contact?view=graph-rest-1.0 --record(state, { name - , channel - , isConnected = true - , accessToken - , apiUrl - , contacts = [] % - , keyPrefix - , fetchUrl - , clContacts = [] % - , isCleanupFinished = true - , pushChannel - , type = pull - , auditStartTime = {0,0} - , firstSync = true - , accountId +-record(state, { name :: binary() + , type = pull :: pull|push + , channel :: binary() + , keyPrefix :: list() + , tokenPrefix :: list() + , token :: map() + , apiUrl :: binary() + , fetchUrl :: binary() + , contacts = [] :: list() + , clContacts = [] :: list() + , isConnected = true :: boolean() + , isFirstSync = true :: boolean() + , isCleanupFinished = true :: boolean() + , auditStartTime = {0,0} :: tuple() + , template = ?NOT_FOUND :: ?NOT_FOUND|map() + , accountId :: system|integer() }). % dperl_strategy_scr export @@ -92,6 +120,8 @@ % dderl_oauth exports -export([ get_auth_config/0 , get_auth_config/1 + , get_auth_token_key_prefix/0 + , get_auth_token_key_prefix/1 , get_key_prefix/0 , get_key_prefix/1 ]). @@ -100,6 +130,10 @@ get_auth_config() -> ?OAUTH2_CONFIG(<<>>). get_auth_config(JobName) -> ?OAUTH2_CONFIG(JobName). +get_auth_token_key_prefix() -> ?OAUTH2_TOKEN_KEY_PREFIX(<<>>). + +get_auth_token_key_prefix(JobName) -> ?OAUTH2_TOKEN_KEY_PREFIX(JobName). + get_key_prefix() -> ?KEY_PREFIX(<<>>). get_key_prefix(JobName) -> ?KEY_PREFIX(JobName). @@ -123,30 +157,31 @@ format_remote_values_to_kv([Value|Values], KeyPrefix, JobName, Acc) -> Key = KeyPrefix ++ [local_contact_id(RemoteId)], format_remote_values_to_kv(Values, KeyPrefix, JobName, [{Key,RemoteId,format_value(Value, JobName)}|Acc]). -% format remote or local value by projecting it down to configured list of synced attributes -format_value(Value, JobName) when is_map(Value) -> maps:with(?CONTACT_ATTRIBUTES(JobName), Value). 
+% format remote or local value by projecting it down to configured list of synced (meta + content) attributes +format_value(Value, JobName) when is_map(Value) -> + maps:with(?META_ATTRIBUTES(JobName)++?CONTENT_ATTRIBUTES(JobName), Value). connect_check_src(#state{isConnected=true} = State) -> {ok, State}; -connect_check_src(#state{isConnected=false, accountId=AccountId, keyPrefix=KeyPrefix} = State) -> +connect_check_src(#state{isConnected=false, accountId=AccountId, tokenPrefix=TokenPrefix} = State) -> ?JTrace("Refreshing access token"), - case dderl_oauth:refresh_access_token(AccountId, KeyPrefix, ?SYNC_OFFICE365) of - {ok, AccessToken} -> + case dderl_oauth:refresh_access_token(AccountId, TokenPrefix, ?SYNC_OFFICE365) of + {ok, Token} -> ?Info("new access token fetched"), - {ok, State#state{accessToken=AccessToken, isConnected=true}}; + {ok, State#state{token=Token, isConnected=true}}; {error, Error} -> ?JError("Unexpected response : ~p", [Error]), {error, Error, State} end. get_source_events(#state{auditStartTime=LastStartTime, type=push, - pushChannel=PushChannel, firstSync=FirstSync} = State, BulkSize) -> - case dperl_dal:read_audit_keys(PushChannel, LastStartTime, BulkSize) of + channel=Channel, isFirstSync=IsFirstSync} = State, BulkSize) -> + case dperl_dal:read_audit_keys(Channel, LastStartTime, BulkSize) of {LastStartTime, LastStartTime, []} -> if - FirstSync == true -> + IsFirstSync -> ?JInfo("Audit rollup is complete"), - {ok, sync_complete, State#state{firstSync=false}}; + {ok, sync_complete, State#state{isFirstSync=false}}; true -> {ok, sync_complete, State} end; @@ -159,14 +194,15 @@ get_source_events(#state{auditStartTime=LastStartTime, type=push, get_source_events(#state{contacts=[]} = State, _BulkSize) -> {ok, sync_complete, State}; get_source_events(#state{contacts=Contacts} = State, _BulkSize) -> + ?Info("get_source_events result count ~p~n~p",[length(Contacts), hd(Contacts)]), {ok, Contacts, State#state{contacts=[]}}. 
connect_check_dst(State) -> {ok, State}. % Question: Why defaulted for push destination? do_refresh(_State, _BulkSize) -> {error, cleanup_only}. -fetch_src(Key, #state{pushChannel=PushChannel, type=push}) -> - dperl_dal:read_channel(PushChannel, Key); +fetch_src(Key, #state{channel=Channel, type=push}) -> + dperl_dal:read_channel(Channel, Key); fetch_src(Key, #state{clContacts=ClContacts, type=pull}) -> case lists:keyfind(Key, 1, ClContacts) of {Key, _RemoteId, Value} -> Value; @@ -174,11 +210,11 @@ fetch_src(Key, #state{clContacts=ClContacts, type=pull}) -> end. fetch_dst(Key, #state{name=Name, clContacts=ClContacts, type=push, - apiUrl=ApiUrl, accessToken=AccessToken} = State) -> + apiUrl=ApiUrl, token=Token} = State) -> case lists:keyfind(Key, 1, ClContacts) of {Key, RemoteId, _Value} -> ContactUrl = erlang:iolist_to_binary([ApiUrl, RemoteId]), - case exec_req(ContactUrl, AccessToken) of + case exec_req(ContactUrl, Token) of #{<<"id">> := _} = RValue -> format_value(RValue, Name); {error, unauthorized} -> reconnect_exec(State, fetch_dst, [Key]); {error, Error} -> {error, Error}; @@ -190,33 +226,44 @@ fetch_dst(Key, #state{name=Name, clContacts=ClContacts, type=push, fetch_dst(Key, #state{channel=Channel}) -> dperl_dal:read_channel(Channel, Key). 
-insert_dst(Key, Value, #state{name=Name, channel=Channel, pushChannel=PushChannel, type=push, - keyPrefix=KeyPrefix, apiUrl=ApiUrl, accessToken=AccessToken} = State) -> - case exec_req(ApiUrl, AccessToken, Value, post) of - #{<<"id">> := Id} = RemoteValue -> - NewKey = KeyPrefix ++ [local_contact_id(Id)], - FormRemote = format_value(RemoteValue, Name), - PushValue = dperl_dal:read_channel(PushChannel, Key), - MergeValue = maps:merge(PushValue, FormRemote), - MergedBin = imem_json:encode(MergeValue), - dperl_dal:remove_from_channel(PushChannel, Key), - dperl_dal:write_channel(Channel, NewKey, MergedBin), - dperl_dal:write_channel(PushChannel, NewKey, MergedBin), - {false, State}; - {error, unauthorized} -> - reconnect_exec(State, insert_dst, [Key, Value]); - {error, Error} -> - {error, Error} +insert_dst(Key, Value, #state{type=push, apiUrl=ApiUrl, token=Token} = State) -> + case exec_req(ApiUrl, Token, Value, post) of + #{<<"id">> := _} = RemoteValue -> merge_meta_to_local(Key, RemoteValue, State); + {error, unauthorized} -> reconnect_exec(State, insert_dst, [Key, Value]); + {error, Error} -> {error, Error} end; insert_dst(Key, Value, State) -> - update_dst(Key, Value, State). + Result = update_dst(Key, Value, State), + ?Info("insert_dst ~p~n~p~nresult ~p",[Key, Value, Result]), + Result. + +merge_meta_to_local(Key, RemoteValue, #state{channel=Channel, tokenPrefix=TokenPrefix} = State) -> + AccessId = access_id(TokenPrefix), + MetaItem = #{<<"id">> => maps:get(<<"id">>, RemoteValue)}, + case dperl_dal:read_channel(Channel, Key) of + #{<<"META">> := Meta} = LocVal -> + case maps:merge(Meta, #{AccessId => MetaItem}) of + Meta -> + ok; % RemoteMeta already there + NewM -> + MergedBin = imem_json:encode(LocVal#{<<"META">> => NewM}), + dperl_dal:write_channel(Channel, Key, MergedBin) + end; + LocVal -> + MergedBin = imem_json:encode(LocVal#{<<"META">> => MetaItem}), + dperl_dal:write_channel(Channel, Key, MergedBin) + end, + {false, State}. 
+ +access_id(TokenPrefix) -> + list_to_binary(string:join(TokenPrefix,"/")). delete_dst(Key, #state{channel=Channel, type=push, clContacts=ClContacts, - apiUrl=ApiUrl, accessToken=AccessToken} = State) -> + apiUrl=ApiUrl, token=Token} = State) -> case lists:keyfind(Key, 1, ClContacts) of {Key, RemoteId, _Value} -> ContactUrl = erlang:iolist_to_binary([ApiUrl, RemoteId]), - case exec_req(ContactUrl, AccessToken, #{}, delete) of + case exec_req(ContactUrl, Token, #{}, delete) of ok -> dperl_dal:remove_from_channel(Channel, Key), {false, State}; @@ -228,26 +275,24 @@ delete_dst(Key, #state{channel=Channel, type=push, clContacts=ClContacts, false -> {false, State} end; -delete_dst(Key, #state{channel=Channel, pushChannel=PushChannel} = State) -> +delete_dst(Key, #state{channel=Channel} = State) -> + ?Info("delete_dst ~p",[Key]), dperl_dal:remove_from_channel(Channel, Key), - dperl_dal:remove_from_channel(PushChannel, Key), {false, State}. -spec update_dst(Key::list(), Value::map(), #state{}) -> tuple(). 
-update_dst(Key, Value, #state{name=Name, channel=Channel, pushChannel=PushChannel, type=push, - clContacts=ClContacts, apiUrl=ApiUrl, accessToken=AccessToken} = State) -> +update_dst(Key, Value, #state{name=Name, channel=Channel, type=push, + clContacts=ClContacts, apiUrl=ApiUrl, token=Token} = State) -> case lists:keyfind(Key, 1, ClContacts) of {Key, RemoteId, _Value} -> ContactUrl = erlang:iolist_to_binary([ApiUrl, RemoteId]), - case exec_req(ContactUrl, AccessToken, Value, patch) of + case exec_req(ContactUrl, Token, Value, patch) of #{<<"id">> := _} = RemoteValue -> FormRemote = format_value(RemoteValue, Name), - OldValue = dperl_dal:read_channel(PushChannel, Key), + OldValue = dperl_dal:read_channel(Channel, Key), MergeValue = maps:merge(OldValue, FormRemote), MergedBin = imem_json:encode(MergeValue), - dperl_dal:remove_from_channel(PushChannel, Key), dperl_dal:write_channel(Channel, Key, MergedBin), - dperl_dal:write_channel(PushChannel, Key, MergedBin), {false, State}; {error, unauthorized} -> reconnect_exec(State, update_dst, [Key, Value]); @@ -257,27 +302,29 @@ update_dst(Key, Value, #state{name=Name, channel=Channel, pushChannel=PushChanne false -> {false, State} end; -update_dst(Key, Value, #state{channel=Channel, pushChannel=PushChannel} = State) when is_map(Value) -> +update_dst(Key, Value, #state{channel=Channel} = State) when is_map(Value) -> OldValue = dperl_dal:read_channel(Channel, Key), MergeValue = maps:merge(OldValue, Value), MergedBin = imem_json:encode(MergeValue), dperl_dal:write_channel(Channel, Key, MergedBin), - dperl_dal:write_channel(PushChannel, Key, MergedBin), {false, State}. report_status(_Key, _Status, _State) -> no_op. -load_dst_after_key(CurKey, BlkCount, #state{channel = Channel}) -> - dperl_dal:read_gt(Channel, CurKey, BlkCount). 
+load_dst_after_key(CurKey, BlkCount, #state{type=pull, keyPrefix=KeyPrefix} = State) when CurKey < KeyPrefix -> + load_dst_after_key(KeyPrefix, BlkCount, State); +load_dst_after_key(CurKey, BlkCount, #state{channel=Channel, type=pull, keyPrefix=KeyPrefix}) -> + Filter = fun({K,_}) -> lists:prefix(KeyPrefix,K) end, + lists:filter(Filter, dperl_dal:read_gt(Channel, CurKey, BlkCount)). load_src_after_key(CurKey, BlkCount, #state{type=pull, fetchUrl=undefined, apiUrl=ApiUrl} = State) -> UrlParams = dperl_dal:url_enc_params(#{"$top" => integer_to_list(BlkCount)}), ContactsUrl = erlang:iolist_to_binary([ApiUrl, "?", UrlParams]), load_src_after_key(CurKey, BlkCount, State#state{fetchUrl=ContactsUrl}); load_src_after_key(CurKey, BlkCount, #state{name=Name, type=pull, isCleanupFinished=true, - keyPrefix=KeyPrefix, accessToken=AccessToken, fetchUrl=FetchUrl} = State) -> + keyPrefix=KeyPrefix, token=Token, fetchUrl=FetchUrl} = State) -> % fetch all contacts - case fetch_all_contacts(FetchUrl, AccessToken, KeyPrefix, Name) of + case fetch_all_contacts(FetchUrl, Token, KeyPrefix, Name) of {ok, Contacts} -> load_src_after_key(CurKey, BlkCount, State#state{clContacts=Contacts, isCleanupFinished=false}); {error, unauthorized} -> @@ -285,7 +332,7 @@ load_src_after_key(CurKey, BlkCount, #state{name=Name, type=pull, isCleanupFinis {error, Error} -> {error, Error, State} end; -load_src_after_key(CurKey, BlkCount, #state{clContacts=Contacts} = State) -> +load_src_after_key(CurKey, BlkCount, #state{type=pull, clContacts=Contacts} = State) -> {ok, get_contacts_gt(CurKey, BlkCount, Contacts), State}. reconnect_exec(State, Fun, Args) -> @@ -309,7 +356,7 @@ get_status(#state{}) -> #{}. init_state(_) -> #state{}. 
init({#dperlJob{name=Name, srcArgs=#{apiUrl:=ApiUrl}, args=Args, - dstArgs=#{channel:=Channel, pushChannel:=PChannel} = DstArgs}, State}) -> + dstArgs=#{channel:=Channel} = DstArgs}, State}) -> case dperl_auth_cache:get_enc_hash(Name) of undefined -> ?JError("Encryption hash is not avaialable"), @@ -318,18 +365,27 @@ init({#dperlJob{name=Name, srcArgs=#{apiUrl:=ApiUrl}, args=Args, ?JInfo("Starting with ~p's enchash...", [AccountId]), imem_enc_mnesia:put_enc_hash(EncHash), KeyPrefix = maps:get(keyPrefix, DstArgs, get_key_prefix(Name)), - case dderl_oauth:get_token_info(AccountId, KeyPrefix, ?SYNC_OFFICE365) of - #{<<"access_token">> := AccessToken} -> - ChannelBin = dperl_dal:to_binary(Channel), - PChannelBin = dperl_dal:to_binary(PChannel), + TokenPrefix = get_auth_token_key_prefix(Name), + case dderl_oauth:get_token_info(AccountId, TokenPrefix, ?SYNC_OFFICE365) of + #{<<"access_token">> := Token} -> Type = maps:get(type, Args, pull), + ChannelBin = dperl_dal:to_binary(Channel), dperl_dal:create_check_channel(ChannelBin), - dperl_dal:create_check_channel(PChannelBin), - {ok, State#state{channel = ChannelBin, name = Name, apiUrl = ApiUrl, - keyPrefix = KeyPrefix, accessToken = AccessToken, - pushChannel = PChannelBin, type = Type, accountId = AccountId}}; + ContactIff = <<"fun() ->imem_index:gen_iff_binterm_list_pattern([\"contact\",'_','_']) end.">>, + PLContact = [{':',<<"id">>, {'#', <<"values">>, {':', <<"META">>, <<"cvalue">>}}}], + IdxContact = #ddIdxDef{id = ?CONTACT_INDEXID + ,name = <<"idx_contact">> + ,type = iv_k + ,pl = PLContact + ,vnf = <<"fun imem_index:vnf_identity/1.">> + ,iff = ContactIff}, + dperl_dal:create_check_index(ChannelBin, [IdxContact]), + {ok, State#state{ name=Name, type=Type, channel=ChannelBin, keyPrefix=KeyPrefix + , apiUrl=ApiUrl, tokenPrefix=TokenPrefix + , token=Token, accountId = AccountId + , template=dperl_dal:read_channel(Channel, KeyPrefix)}}; _ -> - ?JError("Access token not found for ~p at ~p", [AccountId, KeyPrefix]), + 
?JError("Access token not found for ~p at ~p", [AccountId, TokenPrefix]), {stop, badarg} end end; @@ -357,16 +413,16 @@ terminate(Reason, _State) -> %% Fetch all remote contacts, create 3-tuple {Key::list(), RemoteId::binary(), RemoteValue::map()) %% Sort by Key (needed for sync) -fetch_all_contacts(Url, AccessToken, KeyPrefix, JobName) -> - fetch_all_contacts(Url, AccessToken, KeyPrefix, JobName, []). +fetch_all_contacts(Url, Token, KeyPrefix, JobName) -> + fetch_all_contacts(Url, Token, KeyPrefix, JobName, []). -fetch_all_contacts(Url, AccessToken, KeyPrefix, JobName, AccContacts) -> +fetch_all_contacts(Url, Token, KeyPrefix, JobName, AccContacts) -> ?JTrace("Fetching contacts with url : ~s", [Url]), ?JTrace("Fetched contacts : ~p", [length(AccContacts)]), - case exec_req(Url, AccessToken) of + case exec_req(Url, Token) of #{<<"@odata.nextLink">> := NextUrl, <<"value">> := MoreContacts} -> Contacts = format_remote_values_to_kv(MoreContacts, KeyPrefix, JobName), - fetch_all_contacts(NextUrl, AccessToken, KeyPrefix, lists:append(Contacts, AccContacts)); + fetch_all_contacts(NextUrl, Token, KeyPrefix, lists:append(Contacts, AccContacts)); #{<<"value">> := MoreContacts} -> Contacts = format_remote_values_to_kv(MoreContacts, KeyPrefix, JobName), {ok, lists:keysort(1, lists:append(Contacts, AccContacts))}; @@ -385,11 +441,11 @@ get_contacts_gt(CurKey, BlkCount, [{Key, _} | Contacts], Acc) when Key =< CurKey get_contacts_gt(CurKey, BlkCount, [Contact | Contacts], Acc) -> get_contacts_gt(CurKey, BlkCount, Contacts, [Contact | Acc]). --spec exec_req(Url::binary()|string(), AccessToken::binary()) -> tuple(). -exec_req(Url, AccessToken) when is_binary(Url) -> - exec_req(binary_to_list(Url), AccessToken); -exec_req(Url, AccessToken) -> - AuthHeader = [{"Authorization", "Bearer " ++ binary_to_list(AccessToken)}], +-spec exec_req(Url::binary()|string(), Token::binary()) -> tuple(). 
+exec_req(Url, Token) when is_binary(Url) -> + exec_req(binary_to_list(Url), Token); +exec_req(Url, Token) -> + AuthHeader = [{"Authorization", "Bearer " ++ binary_to_list(Token)}], case httpc:request(get, {Url, AuthHeader}, [], []) of {ok, {{_, 200, "OK"}, _, Result}} -> imem_json:decode(list_to_binary(Result), [return_maps]); @@ -399,11 +455,11 @@ exec_req(Url, AccessToken) -> {error, Error} end. --spec exec_req(Url::binary()|string(), AccessToken::binary(), Body::map(), Method::atom()) -> tuple(). -exec_req(Url, AccessToken, Body, Method) when is_binary(Url), is_map(Body) -> - exec_req(binary_to_list(Url), AccessToken, Body, Method); -exec_req(Url, AccessToken, Body, Method) -> - AuthHeader = [{"Authorization", "Bearer " ++ binary_to_list(AccessToken)}], +-spec exec_req(Url::binary()|string(), Token::binary(), Body::map(), Method::atom()) -> tuple(). +exec_req(Url, Token, Body, Method) when is_binary(Url), is_map(Body) -> + exec_req(binary_to_list(Url), Token, Body, Method); +exec_req(Url, Token, Body, Method) -> + AuthHeader = [{"Authorization", "Bearer " ++ binary_to_list(Token)}], % Headers = [AuthHeader, {"Contnet-type", "application/json"}], case httpc:request(Method, {Url, AuthHeader, "application/json", imem_json:encode(Body)}, [], []) of {ok, {{_, 201, _}, _, Result}} -> From 24416a1daad13cdaa242baae9e927cc02e42ad8d Mon Sep 17 00:00:00 2001 From: stoch Date: Wed, 10 Jun 2020 14:36:11 +0200 Subject: [PATCH 56/72] move imem hrl files from /include to /src and rename job worker modules --- src/dderl.hrl | 7 +++---- src/dderl_dal.erl | 2 +- src/dderl_data_receiver.erl | 4 ++-- src/dderl_data_sender.erl | 2 +- src/dderl_diff.erl | 2 +- src/dderl_fsm.erl | 2 +- src/dderl_oauth.erl | 1 - src/dderl_rest.erl | 2 +- src/dderlodpi.hrl | 2 +- src/dperl/dperl.hrl | 2 +- src/dperl/dperl_metrics.erl | 2 +- src/dperl/{jobs => }/dperl_status.hrl | 2 +- .../jobs/{dperl_file_copy.erl => dpjob_file_copy.erl} | 4 ++-- .../jobs/{dperl_office_365.erl => dpjob_office_365.erl} | 
8 ++++---- ...{dperl_ouraring_crawl.erl => dpjob_ouraring_crawl.erl} | 4 ++-- .../jobs/{dperl_skvh_copy.erl => dpjob_skvh_copy.erl} | 4 ++-- .../jobs/{dperl_status_agr.erl => dpjob_status_agr.erl} | 4 ++-- .../jobs/{dperl_status_pre.erl => dpjob_status_pre.erl} | 4 ++-- .../jobs/{dperl_status_pull.erl => dpjob_status_pull.erl} | 4 ++-- src/gen_adapter.erl | 2 +- src/imem_adapter.erl | 2 +- src/odpi_adapter.erl | 2 +- test/dperl_file_copy_SUITE.erl | 4 ++-- 23 files changed, 35 insertions(+), 37 deletions(-) rename src/dperl/{jobs => }/dperl_status.hrl (98%) rename src/dperl/jobs/{dperl_file_copy.erl => dpjob_file_copy.erl} (99%) rename src/dperl/jobs/{dperl_office_365.erl => dpjob_office_365.erl} (99%) rename src/dperl/jobs/{dperl_ouraring_crawl.erl => dpjob_ouraring_crawl.erl} (99%) rename src/dperl/jobs/{dperl_skvh_copy.erl => dpjob_skvh_copy.erl} (99%) rename src/dperl/jobs/{dperl_status_agr.erl => dpjob_status_agr.erl} (99%) rename src/dperl/jobs/{dperl_status_pre.erl => dpjob_status_pre.erl} (98%) rename src/dperl/jobs/{dperl_status_pull.erl => dpjob_status_pull.erl} (99%) diff --git a/src/dderl.hrl b/src/dderl.hrl index 50782fac..b503834c 100644 --- a/src/dderl.hrl +++ b/src/dderl.hrl @@ -1,10 +1,9 @@ -ifndef(DDERL_HRL). -define(DDERL_HRL, true). --include_lib("imem/include/imem_meta.hrl"). --include_lib("imem/include/imem_exports.hrl"). -% -include("dderl/_checkouts/imem/include/imem_config.hrl"). - +-include_lib("imem/src/imem_config.hrl"). +-include_lib("imem/src/imem_meta.hrl"). +-include_lib("imem/src/imem_exports.hrl"). -define(DEFAULT_ROW_SIZE, 100). -record(viewstate, diff --git a/src/dderl_dal.erl b/src/dderl_dal.erl index afa0e302..350cddbf 100644 --- a/src/dderl_dal.erl +++ b/src/dderl_dal.erl @@ -607,7 +607,7 @@ is_local_query(Qry) -> can_connect_locally(Sess) -> erlimem_session:run_cmd(Sess, have_permission, [[?USE_LOCAL_CONN]]) == true. --spec conn_permission({atom(), pid()}, ddEntityId(), #ddConn{}) -> boolean(). 
+-spec conn_permission({atom(), pid()}, integer()|atom(), #ddConn{}) -> boolean(). conn_permission(_Sess, UserId, #ddConn{owner=UserId}) -> true; %% If it is the owner always allow usage. conn_permission(Sess, _UserId, #ddConn{id=ConnId, owner=system}) -> erlimem_session:run_cmd(Sess, have_permission, [?USE_SYS_CONNS]) orelse %% If it can use system connections. diff --git a/src/dderl_data_receiver.erl b/src/dderl_data_receiver.erl index 34b2d3d9..b542b005 100644 --- a/src/dderl_data_receiver.erl +++ b/src/dderl_data_receiver.erl @@ -3,8 +3,8 @@ -include("dderl.hrl"). -include("gres.hrl"). --include_lib("imem/include/imem_sql.hrl"). %% Included for stmtCol record --include_lib("imem/include/imem_meta.hrl"). %% Included for config access +-include_lib("imem/src/imem_sql.hrl"). %% Included for stmtCol record +-include_lib("imem/src/imem_meta.hrl"). %% Included for config access -export([start_link/4 ,get_status/2 diff --git a/src/dderl_data_sender.erl b/src/dderl_data_sender.erl index a6d87c34..49db3e73 100644 --- a/src/dderl_data_sender.erl +++ b/src/dderl_data_sender.erl @@ -2,7 +2,7 @@ -behaviour(gen_server). -include("dderl.hrl"). --include_lib("imem/include/imem_meta.hrl"). %% Included for config access +-include_lib("imem/src/imem_meta.hrl"). %% Included for config access -export([start_link/3 ,connect/2 diff --git a/src/dderl_diff.erl b/src/dderl_diff.erl index 59df38c9..c19dff3d 100644 --- a/src/dderl_diff.erl +++ b/src/dderl_diff.erl @@ -2,7 +2,7 @@ -include("dderl.hrl"). -include("gres.hrl"). --include_lib("imem/include/imem_sql.hrl"). +-include_lib("imem/src/imem_sql.hrl"). -export([ term_diff/6 diff --git a/src/dderl_fsm.erl b/src/dderl_fsm.erl index c47ec67e..9f250770 100644 --- a/src/dderl_fsm.erl +++ b/src/dderl_fsm.erl @@ -7,7 +7,7 @@ -include("dderl.hrl"). -include("gres.hrl"). --include_lib("imem/include/imem_sql.hrl"). +-include_lib("imem/src/imem_sql.hrl"). -ifndef(NoFilter). 
%% TODO driver should translate for the same effect diff --git a/src/dderl_oauth.erl b/src/dderl_oauth.erl index 733a0963..03676ee9 100644 --- a/src/dderl_oauth.erl +++ b/src/dderl_oauth.erl @@ -1,7 +1,6 @@ -module(dderl_oauth). -include("dderl.hrl"). -% -include("dderl/_checkouts/imem/include/imem_config.hrl"). -define(TOKEN_KEYPART, "#token#"). diff --git a/src/dderl_rest.erl b/src/dderl_rest.erl index ae476ffc..b4f063b6 100644 --- a/src/dderl_rest.erl +++ b/src/dderl_rest.erl @@ -2,7 +2,7 @@ -behaviour(cowboy_loop). -include("dderl.hrl"). --include_lib("imem/include/imem_sql.hrl"). +-include_lib("imem/src/imem_sql.hrl"). % gen_server exports -export([start_link/0, init/1, terminate/2, handle_call/3, handle_cast/2, diff --git a/src/dderlodpi.hrl b/src/dderlodpi.hrl index e02d3537..c72c6a5a 100644 --- a/src/dderlodpi.hrl +++ b/src/dderlodpi.hrl @@ -2,7 +2,7 @@ -define(DDERLODPI_HRL, true). -include("dderl.hrl"). --include_lib("imem/include/imem_sql.hrl"). +-include_lib("imem/src/imem_sql.hrl"). -define(InitTimeout, 3600000). -define(ExecTimeout, 3600000). diff --git a/src/dperl/dperl.hrl b/src/dperl/dperl.hrl index 97fc8942..e4db8898 100644 --- a/src/dperl/dperl.hrl +++ b/src/dperl/dperl.hrl @@ -3,7 +3,7 @@ -define(LOG_TAG, "_dperl_"). --include("../dderl.hrl"). % -include_lib("dderl/src/dderl.hrl"). +-include("../dderl.hrl"). -type plan() :: at_most_once|at_least_once|on_all_nodes. diff --git a/src/dperl/dperl_metrics.erl b/src/dperl/dperl_metrics.erl index d2c53aa9..4fae5edc 100644 --- a/src/dperl/dperl_metrics.erl +++ b/src/dperl/dperl_metrics.erl @@ -1,7 +1,7 @@ -module(dperl_metrics). -include("dperl.hrl"). --include_lib("jobs/dperl_status.hrl"). +-include("dperl_status.hrl"). -behaviour(imem_gen_metrics). 
diff --git a/src/dperl/jobs/dperl_status.hrl b/src/dperl/dperl_status.hrl similarity index 98% rename from src/dperl/jobs/dperl_status.hrl rename to src/dperl/dperl_status.hrl index 20e062d3..0546dced 100644 --- a/src/dperl/jobs/dperl_status.hrl +++ b/src/dperl/dperl_status.hrl @@ -1,7 +1,7 @@ -ifndef(_dperl_STATUS_HRL_). -define(_dperl_STATUS_HRL_, true). --include_lib("dperl/dperl.hrl"). +-include("dperl.hrl"). -record(context, { name :: list(), diff --git a/src/dperl/jobs/dperl_file_copy.erl b/src/dperl/jobs/dpjob_file_copy.erl similarity index 99% rename from src/dperl/jobs/dperl_file_copy.erl rename to src/dperl/jobs/dpjob_file_copy.erl index 4cb12f59..d9e13692 100644 --- a/src/dperl/jobs/dperl_file_copy.erl +++ b/src/dperl/jobs/dpjob_file_copy.erl @@ -1,6 +1,6 @@ --module(dperl_file_copy). +-module(dpjob_file_copy). --include_lib("../dperl.hrl"). +-include("../dperl.hrl"). -behavior(dperl_worker). diff --git a/src/dperl/jobs/dperl_office_365.erl b/src/dperl/jobs/dpjob_office_365.erl similarity index 99% rename from src/dperl/jobs/dperl_office_365.erl rename to src/dperl/jobs/dpjob_office_365.erl index 4da0c550..04676c32 100644 --- a/src/dperl/jobs/dperl_office_365.erl +++ b/src/dperl/jobs/dpjob_office_365.erl @@ -1,10 +1,10 @@ --module(dperl_office_365). +-module(dpjob_office_365). --include_lib("../dperl.hrl"). +-include("../dperl.hrl"). -behavior(dperl_worker). --behavior(dperl_strategy_scr). - +-behavior(dperl_strategy_scr). + -define(OAUTH2_CONFIG(__JOB_NAME), ?GET_CONFIG(oAuth2Config, [__JOB_NAME], diff --git a/src/dperl/jobs/dperl_ouraring_crawl.erl b/src/dperl/jobs/dpjob_ouraring_crawl.erl similarity index 99% rename from src/dperl/jobs/dperl_ouraring_crawl.erl rename to src/dperl/jobs/dpjob_ouraring_crawl.erl index c0de1b91..3a8bd0f1 100644 --- a/src/dperl/jobs/dperl_ouraring_crawl.erl +++ b/src/dperl/jobs/dpjob_ouraring_crawl.erl @@ -1,6 +1,6 @@ --module(dperl_ouraring_crawl). +-module(dpjob_ouraring_crawl). --include_lib("../dperl.hrl"). 
+-include("../dperl.hrl"). -behavior(dperl_worker). -behavior(dperl_strategy_scr). diff --git a/src/dperl/jobs/dperl_skvh_copy.erl b/src/dperl/jobs/dpjob_skvh_copy.erl similarity index 99% rename from src/dperl/jobs/dperl_skvh_copy.erl rename to src/dperl/jobs/dpjob_skvh_copy.erl index a148fad1..013c8834 100644 --- a/src/dperl/jobs/dperl_skvh_copy.erl +++ b/src/dperl/jobs/dpjob_skvh_copy.erl @@ -1,6 +1,6 @@ --module(dperl_skvh_copy). +-module(dpjob_skvh_copy). --include_lib("dperl/dperl.hrl"). +-include("../dperl.hrl"). -behavior(dperl_worker). -behavior(dperl_strategy_scr). diff --git a/src/dperl/jobs/dperl_status_agr.erl b/src/dperl/jobs/dpjob_status_agr.erl similarity index 99% rename from src/dperl/jobs/dperl_status_agr.erl rename to src/dperl/jobs/dpjob_status_agr.erl index fe17251e..fa8a241b 100644 --- a/src/dperl/jobs/dperl_status_agr.erl +++ b/src/dperl/jobs/dpjob_status_agr.erl @@ -1,6 +1,6 @@ --module(dperl_status_agr). +-module(dpjob_status_agr). --include("dperl_status.hrl"). +-include("../dperl_status.hrl"). -export([sync_puller_error/5, check_nodes/5, merge_node_errors/5, check_memory_on_nodes/5, check_heartbeats/5, check_job_error/5, diff --git a/src/dperl/jobs/dperl_status_pre.erl b/src/dperl/jobs/dpjob_status_pre.erl similarity index 98% rename from src/dperl/jobs/dperl_status_pre.erl rename to src/dperl/jobs/dpjob_status_pre.erl index 408b8da0..a6b3290d 100644 --- a/src/dperl/jobs/dperl_status_pre.erl +++ b/src/dperl/jobs/dpjob_status_pre.erl @@ -1,6 +1,6 @@ --module(dperl_status_pre). +-module(dpjob_status_pre). --include("dperl_status.hrl"). +-include("../dperl_status.hrl"). -export([preprocess/5]). 
diff --git a/src/dperl/jobs/dperl_status_pull.erl b/src/dperl/jobs/dpjob_status_pull.erl similarity index 99% rename from src/dperl/jobs/dperl_status_pull.erl rename to src/dperl/jobs/dpjob_status_pull.erl index eca12c91..f15414a1 100644 --- a/src/dperl/jobs/dperl_status_pull.erl +++ b/src/dperl/jobs/dpjob_status_pull.erl @@ -1,6 +1,6 @@ --module(dperl_status_pull). +-module(dpjob_status_pull). --include("dperl_status.hrl"). +-include("../dperl_status.hrl"). -behavior(dperl_worker). -behavior(dperl_strategy_scr). diff --git a/src/gen_adapter.erl b/src/gen_adapter.erl index 73265a38..4580d59e 100644 --- a/src/gen_adapter.erl +++ b/src/gen_adapter.erl @@ -3,7 +3,7 @@ -include("dderl.hrl"). -include("gres.hrl"). --include_lib("imem/include/imem_sql.hrl"). +-include_lib("imem/src/imem_sql.hrl"). -export([ process_cmd/6 , init/0 diff --git a/src/imem_adapter.erl b/src/imem_adapter.erl index cb334460..bc22b8e5 100644 --- a/src/imem_adapter.erl +++ b/src/imem_adapter.erl @@ -4,7 +4,7 @@ -include("dderl.hrl"). -include("gres.hrl"). --include_lib("imem/include/imem_sql.hrl"). +-include_lib("imem/src/imem_sql.hrl"). -export([ init/0 , process_cmd/6 diff --git a/src/odpi_adapter.erl b/src/odpi_adapter.erl index 9807c155..e3e0461f 100644 --- a/src/odpi_adapter.erl +++ b/src/odpi_adapter.erl @@ -14,7 +14,7 @@ -include("dderlodpi.hrl"). -include("gres.hrl"). --include_lib("imem/include/imem_sql.hrl"). +-include_lib("imem/src/imem_sql.hrl"). -export([ init/0 , process_cmd/6 diff --git a/test/dperl_file_copy_SUITE.erl b/test/dperl_file_copy_SUITE.erl index 994ebe02..ba269072 100644 --- a/test/dperl_file_copy_SUITE.erl +++ b/test/dperl_file_copy_SUITE.erl @@ -1,9 +1,9 @@ -module(dperl_file_copy_SUITE). --include_lib("../src/dperl/dperl.hrl"). - -include_lib("common_test/include/ct.hrl"). +-include("../src/dperl/dperl.hrl"). + -export([all/0, init_per_suite/1, end_per_suite/1]). -export([test/1]). 
From 604f5717542360d895d20a9ac7ad810864b2f3be Mon Sep 17 00:00:00 2001 From: stoch Date: Sat, 13 Jun 2020 12:42:45 +0200 Subject: [PATCH 57/72] add specs and comments to dperl_strategy_scr module --- src/dperl/dperl.hrl | 87 ++-- src/dperl/dperl_dal.erl | 6 +- src/dperl/dperl_strategy_scr.erl | 633 ++++++++++++++++------------ src/dperl/dperl_strategy_scr.hrl | 27 ++ src/dperl/jobs/dpjob_office_365.erl | 119 ++++-- 5 files changed, 507 insertions(+), 365 deletions(-) create mode 100644 src/dperl/dperl_strategy_scr.hrl diff --git a/src/dperl/dperl.hrl b/src/dperl/dperl.hrl index e4db8898..1733c417 100644 --- a/src/dperl/dperl.hrl +++ b/src/dperl/dperl.hrl @@ -5,7 +5,24 @@ -include("../dderl.hrl"). --type plan() :: at_most_once|at_least_once|on_all_nodes. +-type jobName() :: binary(). +-type jobModule() :: atom(). +-type jobArgs() :: map(). +-type jobEnabled() :: true|false. +-type jobRunning() :: true|false|undefined. +-type jobPlan() :: at_most_once|at_least_once|on_all_nodes. +-type jobOpts() :: []. % currently none, tbd +-type jobStatus() :: undefined|idle|synced|cleaning|cleaned|refreshing|refreshed|error|stopped. + +-type serviceName() :: binary(). +-type serviceModule() :: atom(). +-type serviceArgs() :: map(). +-type serviceEnabled() :: true|false. +-type serviceRunning() :: true|false|undefined. +-type servicePlan() :: at_most_once|at_least_once|on_all_nodes. +-type serviceOpts() :: []. % currently none, tbd +-type serviceStatus() :: idle|stopped|active|overload. + -define(TABLESPEC(__T,__O), {__T, record_info(fields, __T), ?__T, #__T{}, __O}). @@ -14,50 +31,46 @@ -define(NOT_FOUND, '$notfound'). --record(dperlJob, { - name :: binary(), - module :: atom(), - args :: any(), - srcArgs :: any(), - dstArgs :: any(), - enabled :: true|false, - running :: true|false, - plan :: plan(), - nodes :: [atom()], - opts = [] :: list() - }). 
+-record(dperlJob, { name :: jobName() + , module :: jobModule() + , args :: any() + , srcArgs :: any() + , dstArgs :: any() + , enabled :: jobEnabled() + , running :: jobRunning() + , plan :: jobPlan() + , nodes :: [atom()] + , opts = [] :: jobOpts() + }). -define(dperlJob, [binstr,atom,term,term,term,boolean,atom,atom,list,list]). -define(JOBDYN_TABLE, 'dperlNodeJobDyn@'). --record(dperlNodeJobDyn, { - name :: binary(), % same as dperlJob.name - state :: map(), - status :: atom(), - statusTime :: ddTimestamp() - }). +-record(dperlNodeJobDyn, { name :: jobName() % same as dperlJob.name + , state :: map() + , status :: jobStatus() + , statusTime :: ddTimestamp() + }). -define(dperlNodeJobDyn, [binstr,map,atom,timestamp]). --record(dperlService, { - name :: binary(), - module :: atom(), - args :: any(), - resource :: any(), - interface :: any(), - enabled :: true|false, - running :: true|false, - plan :: plan(), - nodes :: [atom()], - opts = [] :: list() - }). +-record(dperlService, { name :: serviceName() + , module :: serviceModule() + , args :: any() + , resource :: any() + , interface :: any() + , enabled :: serviceEnabled() + , running :: serviceRunning() + , plan :: servicePlan() + , nodes :: [atom()] + , opts = [] :: serviceOpts() + }). -define(dperlService, [binstr,atom,term,term,term,boolean,atom,atom,list,list]). -define(SERVICEDYN_TABLE, 'dperlServiceDyn@'). --record(dperlServiceDyn, { - name :: binary(), % same as dperlService.name - state :: map(), - status :: atom(), - statusTime :: ddTimestamp() - }). +-record(dperlServiceDyn, { name :: serviceName() % same as dperlService.name + , state :: map() + , status :: serviceStatus() + , statusTime :: ddTimestamp() + }). -define(dperlServiceDyn, [binstr,map,atom,timestamp]). 
-define(G(__JS,__F), diff --git a/src/dperl/dperl_dal.erl b/src/dperl/dperl_dal.erl index a80c84ab..99271a22 100644 --- a/src/dperl/dperl_dal.erl +++ b/src/dperl/dperl_dal.erl @@ -277,8 +277,7 @@ update_job_dyn(JobName, State) when is_binary(JobName) andalso is_map(State) -> true -> ok end end end); -update_job_dyn(JobName, Status) - when is_binary(JobName) andalso +update_job_dyn(JobName, Status) when is_binary(JobName) andalso (Status == synced orelse Status == cleaning orelse Status == cleaned orelse Status == refreshing orelse Status == refreshed orelse @@ -301,8 +300,7 @@ update_job_dyn(JobName, Status) end end. -update_job_dyn(JobName, State, Status) - when is_binary(JobName) andalso is_map(State) andalso +update_job_dyn(JobName, State, Status) when is_binary(JobName) andalso is_map(State) andalso (Status == synced orelse Status == undefined orelse Status == cleaning orelse Status == cleaned orelse Status == refreshing orelse Status == refreshed orelse diff --git a/src/dperl/dperl_strategy_scr.erl b/src/dperl/dperl_strategy_scr.erl index 36b55313..7ef33736 100644 --- a/src/dperl/dperl_strategy_scr.erl +++ b/src/dperl/dperl_strategy_scr.erl @@ -1,89 +1,181 @@ -module(dperl_strategy_scr). -include("dperl.hrl"). +-include("dperl_strategy_scr.hrl"). -export([execute/4]). -ifdef(TEST). --export([load_src_after_key/3, load_dst_after_key/3]). +-export([ load_src_after_key/3 + , load_dst_after_key/3 + ]). -endif. --type state() :: any(). - --callback connect_check_src(state()) -> - {ok, state()} | {error, any()} | {error, any(), state()}. --callback get_source_events(state(), integer()) -> - {error, any()} | {ok, list(), state()} | {ok, sync_complete, state()}. --callback connect_check_dst(state()) -> - {ok, state()} | {error, any()} | {error, any(), state()}. --callback fetch_src(any(), state()) -> ?NOT_FOUND | term(). --callback fetch_dst(any(), state()) -> ?NOT_FOUND | term(). --callback delete_dst(any(), state()) -> {true, state()} | {false, state()}. 
--callback insert_dst(any(), any(), state()) -> {true, state()} | {false, state()}. --callback update_dst(any(), any(), state()) -> {true, state()} | {false, state()}. --callback report_status(any(), any(), state()) -> ok | {error, any()}. --callback do_refresh(state(), integer()) -> - {error, any()} | {ok, state()} | {ok, finish, state()}. +% check if source data is currently acessible +-callback connect_check_src(scrState()) -> + {ok, scrState()} | {error, any()} | {error, any(), scrState()}. + +% get a key list (limited in length) of recent changes on source side +% often achieved by scanning an audit log after last checked timestamp (from state) +% basis for sync cycle (differential change provisioning) +-callback get_source_events(scrState(), scrBatchSize()) -> + {error, scrAnyKey()} | {ok, scrAnyKeys(), scrState()} | {ok, sync_complete, scrState()}. + +% check if destination data is currently acessible +-callback connect_check_dst(scrState()) -> + {ok, scrState()} | {error, any()} | {error, any(), scrState()}. + +% fetch one item from source (if it exists) +-callback fetch_src(scrAnyKey(), scrState()) -> ?NOT_FOUND | scrAnyVal(). + +% fetch one item from destination (if it exists) +-callback fetch_dst(scrAnyKey(), scrState()) -> ?NOT_FOUND | scrAnyVal(). + +% delete one item from destination (if it exists) +% first element in result is true if a delete was unnecessary ???? +-callback delete_dst(scrAnyKey(), scrState()) -> {scrSoftError(), scrState()}. + +% insert one item to destination (which is assumed to not exist) +% first element in result is true if the insert was compatible but unnecessary (already +% existed in correct shape) ???? +% alternatively, the callback implementation can (or should) throw if key already exists +-callback insert_dst(scrAnyKey(), scrAnyVal(), scrState()) -> {scrSoftError(), scrState()}. 
+ +% update one item in destination (which is assumed to exist) +% first element in result tuple indicates wether an update was possible (false=insert needed) ???? +% alternatively, the callback implementation can (or should) throw if the key does not exist +-callback update_dst(scrAnyKey(), scrAnyVal(), scrState()) -> {scrSoftError(), scrState()}. + +% allow the callback implementation act upon an error or warning message +% result is ignored, used for debugging only +-callback report_status(scrAnyKey(), scrStatus(), scrState()) -> ok | no_op | {error, term()}. + +% execute one more refresh cycle with limited block size +-callback do_refresh(scrState(), scrBatchSize()) -> {{ok, scrState()} | {ok, finish, scrState() | error, any()}}. % optional callbacks --callback should_cleanup(ddTimestamp()|undefined, - ddTimestamp()|undefined, - integer(), integer(), state()) -> true | false. --callback should_refresh(ddTimestamp()|undefined, - ddTimestamp()|undefined, - integer(), integer(), [integer()], state()) -> true | false. --callback is_equal(any(), any(), any(), state()) -> true | false. --callback update_channel(any(), boolean(), any(), any(), state()) -> {true, state()} | {false, state()}. --callback finalize_src_events(state()) -> {true, state()} | {false, state()}. --callback should_sync_log(state()) -> true | false. + +% override callback for cleanup execution permission +-callback should_cleanup( LastAttempt::ddTimestamp(), + LastSuccess::ddTimestamp(), + BatchInterval::scrMsecInterval(), % delay between cleanup batches + CycleInterval::scrMsecInterval(), % delay between cleanup cycles + scrState()) -> true | false. + +% override callback for refresh execution permission +-callback should_refresh( LastAttempt::ddTimestamp(), + LastSuccess::ddTimestamp(), + BatchInterval::scrMsecInterval(), % delay between refresh batches + CycleInterval::scrMsecInterval(), % delay between refresh cycles + scrHoursOfDay(), + scrState()) -> true | false. 
+ +% override for value compare function +-callback is_equal(scrAnyKey(), scrAnyVal(), scrAnyVal(), scrState()) -> true | false. + +% override for destination channel insert/update/delete (final data change) +% first element of result indicates if anything was changed on the target ???? +% (ignored by behaviour, used for debugging only) +-callback update_channel( scrAnyKey(), + IsSamePlatform::boolean(), + SourceVal::scrAnyVal(), + DestVal::scrAnyVal(), + scrState()) -> {true, scrState()} | {false, scrState()}. + +% optional cleanup call after end of differential provisioning (sync phase) ???? +% where and when is it exactly invoked ???? +% first element of result indicates if anything was executed on the target ???? +-callback finalize_src_events(scrState()) -> {true, scrState()} | {false, scrState()}. + +% can be used in callback to suppress the logging of individual sync results +-callback should_sync_log(scrState()) -> true | false. -optional_callbacks([should_cleanup/5, should_refresh/6, is_equal/4, update_channel/5, finalize_src_events/1, should_sync_log/1]). --callback do_cleanup(state(), integer()) -> - {error, any()} | {ok, state()} | {ok, finish, state()}. --callback do_cleanup(list(), list(), boolean(), state()) -> - {error, any()} | {ok, state()} | {ok, finish, state()}. --callback do_cleanup(list(), list(), list(), boolean(), state()) -> - {error, any()} | {ok, state()} | {ok, finish, state()}. --optional_callbacks([do_cleanup/2, do_cleanup/4,do_cleanup/5]). +% execute simple cleanup for next batch of keys +-callback do_cleanup( scrState(), CleanupBulkCount::scrBatchSize()) -> + {{ok, scrState()} | {ok, finish, scrState() | error, any()}}. + +% execute cleanup for found differences (Deletes and Inserts) +-callback do_cleanup( Deletes::scrAnyKeys(), + Inserts::scrAnyKeys(), + SearchingMatch::boolean(), % NextLastKey == MinKey + scrState()) -> + {{ok, scrState()} | {ok, finish, scrState() | error, any()}}. 
+ +% execute cleanup/refresh for found differences (Deletes, Inserts and value Diffs) +-callback do_cleanup( Deletes::scrAnyKeys(), + Inserts::scrAnyKeys(), + Diffs::scrAnyKeys(), + SearchingMatch::boolean(), % NextLastKey == MinKey + scrState()) -> + {{ok, scrState()} | {ok, finish, scrState() | error, any()}}. + +% bulk load one batch of keys (for cleanup) or kv-pairs (cleanup/refresh) from source +% a callback module implementing this and load_dst_after_key signals that it wants +% to fully control the cleanup/refresh procedure +-callback load_src_after_key( LastKeySeen::scrAnyKey(), + scrBatchSize(), + scrState()) -> + {ok, scrAnyKeys(), scrState()} | {ok, scrAnyKeyVals(), scrState()} | {error, any(), scrState()}. + +% bulk load one batch of keys (for cleanup) or kv-pairs (cleanup/refresh) from destination +% a callback module implementing this and load_src_after_key signals that it wants +% to fully control the cleanup/refresh procedure +-callback load_dst_after_key( LastKeySeen::scrAnyKey(), + scrBatchSize(), + scrState()) -> + {ok, scrAnyKeys(), scrState()} | {ok, scrAnyKeyVals(), scrState()} | {error, any(), scrState()}. + +-optional_callbacks([ do_cleanup/2 + , do_cleanup/4 + , do_cleanup/5 + , load_src_after_key/3 + , load_dst_after_key/3 + ]). % chunked cleanup context --record(cleanup_ctx, - {srcKeys :: list(), - srcCount :: integer(), - dstKeys :: list(), - dstCount :: integer(), - bulkCount :: integer(), - minKey :: any(), - maxKey :: any(), - lastKey :: any(), - deletes = [] :: list(), - differences = [] :: list(), - inserts = [] :: list()}). - - +-record(cleanup_ctx,{ srcKeys :: scrAnyKeys() + , srcCount :: integer() + , dstKeys :: scrAnyKeys() + , dstCount :: integer() + , bulkCount :: scrBatchSize() + , minKey :: scrAnyKey() + , maxKey :: scrAnyKey() + , lastKey :: scrAnyKey() + , deletes = [] :: scrAnyKeys() + , inserts = [] :: scrAnyKeys() + , differences = [] :: scrAnyKeys() + }). 
+ +% Debug macros -define(DL(__S,__F,__A), ?Debug([{state,__S},{mod, Mod},{job, Job}],__F,__A)). + -define(DL(__S,__F), ?DL(__S,__F,[])). -define(S(__F), ?DL(sync,__F)). -define(S(__F,__A), ?DL(sync,__F,__A)). + -define(C(__F), ?DL(cleanup,__F)). -define(C(__F,__A), ?DL(cleanup,__F,__A)). + -define(R(__F), ?DL(refresh,__F)). -define(R(__F,__A), ?DL(refresh,__F,__A)). +% trigger macro for next action -define(RESTART_AFTER(__Timeout, __Args), erlang:send_after(__Timeout, self(), {internal, {behavior, ?MODULE, __Args}})). --spec execute(atom(), string(), state(), map()) -> state(). -execute(Mod, Job, State, #{sync := _, cleanup := _, refresh := _} - = Args) - when is_map(Args) -> - try execute(sync, Mod, Job, State, Args) catch +% execute (start or restart) the synchronisation job +-spec execute(jobModule(), jobName(), scrState(), jobArgs()) -> scrState(). +execute(Mod, Job, State, #{sync:=_, cleanup:=_, refresh:=_} = Args) when is_map(Args) -> + try + execute(sync, Mod, Job, State, Args) + catch Class:{step_failed, NewArgs}:Stacktrace when is_map(NewArgs) -> ?JError("~p ~p step_failed~n~p", [Mod, Class, Stacktrace]), dperl_dal:update_job_dyn(Job, error), @@ -104,31 +196,29 @@ execute(Mod, Job, State, #{sync := _, cleanup := _, refresh := _} State end; execute(Mod, Job, State, Args) when is_map(Args) -> - execute(Mod, Job, State, - maps:merge(#{sync => true, cleanup => true, refresh => true}, - Args)). - --spec execute(sync|cleanup|refresh, atom(), string(), state(), map()) -> - state() | no_return(). -% [sync] -execute(sync, Mod, Job, State, #{sync := Sync} = Args) -> + execute(Mod, Job, State, maps:merge(#{sync=>true, cleanup=>true, refresh=>true}, Args)). + +-spec execute(scrPhase(), jobModule(), jobName(), scrState(), jobArgs()) -> scrState() | no_return(). 
+execute(sync, Mod, Job, State, #{sync:=Sync} = Args) -> + % perform a sync cycle put(jstate,s), ?S("Connect/check source if not already connected (trivial for push)"), State1 = - case Mod:connect_check_src(State) of - {error, Error, S1} -> - ?JError("sync(~p) failed at connect_check_src : ~p", [Mod, Error]), - dperl_dal:job_error(<<"sync">>, <<"connect_check_src">>, Error), - error({step_failed, S1}); - {error, Error} -> - ?JError("sync(~p) failed at connect_check_src : ~p", [Mod, Error]), - dperl_dal:job_error(<<"sync">>, <<"connect_check_src">>, Error), - error(step_failed); - {ok, S1} -> - dperl_dal:job_error_close(), - S1 - end, - if Sync == true -> + case Mod:connect_check_src(State) of + {error, Error, S1} -> + ?JError("sync(~p) failed at connect_check_src : ~p", [Mod, Error]), + dperl_dal:job_error(<<"sync">>, <<"connect_check_src">>, Error), + error({step_failed, S1}); + {error, Error} -> + ?JError("sync(~p) failed at connect_check_src : ~p", [Mod, Error]), + dperl_dal:job_error(<<"sync">>, <<"connect_check_src">>, Error), + error(step_failed); + {ok, S1} -> + dperl_dal:job_error_close(), + S1 + end, + if + Sync == true -> ?S("Get pending list of events (max n) to process from source"), case Mod:get_source_events(State1, ?MAX_BULK_COUNT(Mod, Job)) of {error, Error1, S2} -> @@ -174,25 +264,22 @@ execute(sync, Mod, Job, State, #{sync := Sync} = Args) -> dperl_dal:job_error_close(), execute(finish, Mod, Job, State4, Args); %% idle used for dperl_mec_ic to have idle timeout on - %% Try later error from oracle + %% try later error from oracle %% would be removed in the future when new %% behavior is used for mec_ic {idle, State4} -> execute(idle, Mod, Job, State4, Args) end - end; - true -> + end; + true -> ?S("disabled! 
trying cleanup"), execute(cleanup, Mod, Job, State1, Args) end; - -% [cleanup] -execute(cleanup, Mod, Job, State, #{cleanup := true} = Args) -> +execute(cleanup, Mod, Job, State, #{cleanup:=true} = Args) -> + % perform a cleanup cycle if due put(jstate,c), - #{lastAttempt := LastAttempt, - lastSuccess := LastSuccess} = CleanupState = get_state(cleanup, Job), - ShouldCleanupFun = - case erlang:function_exported(Mod, should_cleanup, 5) of + #{lastAttempt:=LastAttempt, lastSuccess:=LastSuccess} = CleanupState = get_cycle_state(cleanup, Job), + ShouldCleanupFun = case erlang:function_exported(Mod, should_cleanup, 5) of true -> fun(LA, LS, BI, CI) -> Mod:should_cleanup(LA, LS, BI, CI, State) end; false -> fun(LA, LS, BI, CI) -> should_cleanup(LA, LS, BI, CI) end end, @@ -203,30 +290,29 @@ execute(cleanup, Mod, Job, State, #{cleanup := true} = Args) -> ?C("(sync phase was nop) if last cleanup + cleanupInterval < now goto [refresh]"), execute(refresh, Mod, Job, State, Args); true -> - set_state(cleanup, Job, start), - Args1 = - if LastAttempt =< LastSuccess -> - ?JInfo("Starting cleanup cycle"), - case Args of - #{stats := #{cleanup_count := CC} = Stats} -> - Args#{stats => Stats#{cleanup_count => CC + 1}}; - Args -> - Stats = maps:get(stats, Args, #{}), - Args#{stats => Stats#{cleanup_count => 1}} - end; - true -> - case Args of - #{stats := #{cleanup_count := CC} = Stats} -> - Args#{stats => Stats#{cleanup_count => CC + 1}}; - Args -> - ?JInfo("Resuming cleanup cycle"), - Stats = maps:get(stats, Args, #{}), - Args#{stats => Stats#{cleanup_count => 1}} - end + set_cycle_state(cleanup, Job, start), + Args1 = if + LastAttempt =< LastSuccess -> + ?JInfo("Starting cleanup cycle"), + case Args of + #{stats := #{cleanup_count := CC} = Stats} -> + Args#{stats => Stats#{cleanup_count => CC + 1}}; + Args -> + Stats = maps:get(stats, Args, #{}), + Args#{stats => Stats#{cleanup_count => 1}} + end; + true -> + case Args of + #{stats := #{cleanup_count := CC} = Stats} -> + 
Args#{stats => Stats#{cleanup_count => CC + 1}}; + Args -> + ?JInfo("Resuming cleanup cycle"), + Stats = maps:get(stats, Args, #{}), + Args#{stats => Stats#{cleanup_count => 1}} + end end, ?C("Connect to destination if not already connected (trivial for pull)"), - State1 = - case Mod:connect_check_dst(State) of + State1 = case Mod:connect_check_dst(State) of {error, Error, S1} -> ?JError("cleanup(~p) failed at connect_check_dst : ~p", [Mod, Error]), dperl_dal:job_error(<<"cleanup">>, <<"connect_check_dst">>, Error), @@ -243,21 +329,22 @@ execute(cleanup, Mod, Job, State, #{cleanup := true} = Args) -> ?C("Build a list of provisioning actions to be taken (aggregated audit list)"), ?C("If list provisioning action is non empty: perform the actions; goto [finish]"), CleanupBulkCount = ?MAX_CLEANUP_BULK_COUNT(Mod, Job), - DoCleanupArgs = - case (erlang:function_exported(Mod, load_src_after_key, 3) andalso - erlang:function_exported(Mod, load_dst_after_key, 3)) of - false -> [State1, CleanupBulkCount]; + DoCleanupArgs = case (erlang:function_exported(Mod, load_src_after_key, 3) andalso + erlang:function_exported(Mod, load_dst_after_key, 3)) of + false -> + % launch simple cleanup cycle using do_cleanup/2 + [State1, CleanupBulkCount]; true -> - #{minKey := MinKey, maxKey := MaxKey, - lastKey := LastKey} = CleanupState, - {#{deletes := Deletes, inserts := Inserts, - differences := Diffs, lastKey := NextLastKey}, State2} = - cleanup_refresh_collect( - Mod, - #cleanup_ctx{minKey = MinKey, maxKey = MaxKey, - lastKey = LastKey, bulkCount = CleanupBulkCount}, - State1), - % update last key + % launch cleanup/refresh combined processing using do_cleanup/5 or do_cleanup/4 + #{minKey:=MinKey, maxKey:=MaxKey, lastKey:=LastKey} = CleanupState, + Ctx = #cleanup_ctx{ minKey=MinKey, maxKey=MaxKey + , lastKey=LastKey, bulkCount=CleanupBulkCount}, + {RefreshCollectResult, State2} = cleanup_refresh_collect(Mod,Ctx,State1), + Deletes = maps:get(deletes,RefreshCollectResult), + Inserts = 
maps:get(inserts,RefreshCollectResult), + Diffs = maps:get(differences,RefreshCollectResult), + NextLastKey = maps:get(lastKey,RefreshCollectResult), + % update last key ToDO: This is UGLY. To be cast into functions !!!! case dperl_dal:select( ?JOBDYN_TABLE, [{#dperlNodeJobDyn{name=Job,_='_'},[],['$_']}]) of @@ -294,25 +381,25 @@ execute(cleanup, Mod, Job, State, #{cleanup := true} = Args) -> error({step_failed, S2}); {ok, S2} -> dperl_dal:job_error_close(), - if length(DoCleanupArgs) == 2 -> - set_state( - cleanup, Job, start, - case Args1 of - #{stats := #{cleanup_count := CC0}} -> - (Mod:get_status(S2))#{count => CC0}; - Args1 -> Mod:get_status(S2) - end); - true -> no_op + if + length(DoCleanupArgs) == 2 -> + set_cycle_state(cleanup, Job, start, + case Args1 of + #{stats := #{cleanup_count := CC0}} -> + (Mod:get_status(S2))#{count => CC0}; + Args1 -> Mod:get_status(S2) + end); + true -> + no_op end, execute(finish, Mod, Job, S2, Args1); {ok, finish, S2} -> - set_state( - cleanup, Job, stop, - case Args1 of - #{stats := #{cleanup_count := CC1}} -> - (Mod:get_status(S2))#{count => CC1}; - Args1 -> Mod:get_status(S2) - end), + set_cycle_state(cleanup, Job, stop, + case Args1 of + #{stats := #{cleanup_count := CC1}} -> + (Mod:get_status(S2))#{count => CC1}; + Args1 -> Mod:get_status(S2) + end), dperl_dal:job_error_close(), ?JInfo("Cleanup cycle is complete"), execute(finish, Mod, Job, S2, Args1) @@ -322,14 +409,11 @@ execute(cleanup, Mod, Job, State, Args) -> put(jstate,c), ?C("disabled! 
trying refresh"), execute(refresh, Mod, Job, State, Args); - -% [refresh] execute(refresh, Mod, Job, State, #{refresh := true} = Args) -> + % execute a refresh cycle if due put(jstate,r), - #{lastAttempt := LastAttempt, - lastSuccess := LastSuccess} = get_state(refresh, Job), - ShouldRefreshFun = - case erlang:function_exported(Mod, should_refresh, 6) of + #{lastAttempt:=LastAttempt, lastSuccess:=LastSuccess} = get_cycle_state(refresh, Job), + ShouldRefreshFun = case erlang:function_exported(Mod, should_refresh, 6) of true -> fun(LA, LS, BI, RI, RH) -> Mod:should_refresh(LA, LS, BI, RI, RH, State) end; false -> fun(LA, LS, BI, RI, RH) -> should_refresh(LA, LS, BI, RI, RH) end end, @@ -341,9 +425,9 @@ execute(refresh, Mod, Job, State, #{refresh := true} = Args) -> ?R("If current hour is not in refreshHours): goto [idle]"), execute(idle, Mod, Job, State, Args); true -> - set_state(refresh, Job, start), - Args1 = - if LastAttempt =< LastSuccess -> + set_cycle_state(refresh, Job, start), + Args1 = if + LastAttempt =< LastSuccess -> ?JInfo("Starting refresh cycle"), case Args of #{stats := #{refresh_count := RC} = Stats} -> @@ -363,8 +447,7 @@ execute(refresh, Mod, Job, State, #{refresh := true} = Args) -> end end, ?R("Connect to destination if not already connected (trivial for pull)"), - State1 = - case Mod:connect_check_dst(State) of + State1 = case Mod:connect_check_dst(State) of {error, Error, S1} -> ?JError("refresh(~p) failed at connect_check_dst : ~p", [Mod, Error]), dperl_dal:job_error(<<"refresh">>, <<"connect_check_dst">>, Error), @@ -387,23 +470,21 @@ execute(refresh, Mod, Job, State, #{refresh := true} = Args) -> dperl_dal:job_error(<<"refresh">>, <<"do_refresh">>, Error1), error({step_failed, Args1}); {ok, S2} -> - set_state( - refresh, Job, start, - case Args1 of - #{stats := #{refresh_count := RC0}} -> - (Mod:get_status(S2))#{count => RC0}; - Args1 -> Mod:get_status(S2) - end), + set_cycle_state(refresh, Job, start, + case Args1 of + #{stats := 
#{refresh_count := RC0}} -> + (Mod:get_status(S2))#{count => RC0}; + Args1 -> Mod:get_status(S2) + end), dperl_dal:job_error_close(), execute(finish, Mod, Job, S2, Args1); {ok, finish, S2} -> - set_state( - refresh, Job, stop, - case Args1 of - #{stats := #{refresh_count := RC1}} -> - (Mod:get_status(S2))#{count => RC1}; - Args1 -> Mod:get_status(S2) - end), + set_cycle_state(refresh, Job, stop, + case Args1 of + #{stats := #{refresh_count := RC1}} -> + (Mod:get_status(S2))#{count => RC1}; + Args1 -> Mod:get_status(S2) + end), dperl_dal:job_error_close(), ?JInfo("Refresh cycle is complete"), execute(finish, Mod, Job, S2, Args1) @@ -424,76 +505,67 @@ execute(finish, Mod, Job, State, Args) -> ?RESTART_AFTER(?CYCLE_ALWAYS_WAIT(Mod, Job), Args), State. --spec get_state(cleanup|refresh, binary()) -> - {ddTimestamp() | undefined, ddTimestamp() | undefined}. -get_state(Type, Job) when (Type == cleanup orelse Type == refresh) - andalso is_binary(Job) -> +-spec get_cycle_state(scrCycle(), jobName()) -> scrCycleState(). +get_cycle_state(Cycle, Job) when (Cycle==cleanup orelse Cycle==refresh) andalso is_binary(Job) -> maps:merge( - if Type == cleanup -> - #{minKey => -1, maxKey => <<255>>, lastKey => 0}; - true -> #{} - end, - case dperl_dal:select( - ?JOBDYN_TABLE, - [{#dperlNodeJobDyn{name=Job,state='$1',_='_'},[],['$1']}]) of - {[#{Type:=State}], true} when is_map(State) -> State; - {_, true} -> #{lastAttempt => ?EPOCH, lastSuccess => ?EPOCH} - end). - --spec set_state(cleanup|refresh, binary(), start | stop) -> any(). -set_state(Type, Job, Status) -> set_state(Type, Job, Status, #{}). - --spec set_state(cleanup|refresh, binary(), start | stop, map()) -> any(). 
-set_state(Type, Job, Status, State0) - when (Type == cleanup orelse Type == refresh) andalso - (Status == start orelse Status == stop) andalso is_binary(Job) -> - {NodeJobDyn, NewStatus0} = - case dperl_dal:select( - ?JOBDYN_TABLE, - [{#dperlNodeJobDyn{name=Job,_='_'},[],['$_']}]) of - {[#dperlNodeJobDyn{state=#{Type:=OldState}} = NJD], true} - when is_map(OldState) -> - {NJD, OldState}; + if + Cycle==cleanup -> #{minKey => -1, maxKey => <<255>>, lastKey => 0}; + true -> #{} + end, + case dperl_dal:select( + ?JOBDYN_TABLE, + [{#dperlNodeJobDyn{name=Job,state='$1',_='_'},[],['$1']}]) of + {[#{Cycle:=CycleState}], true} when is_map(CycleState) -> CycleState; + {_, true} -> #{lastAttempt => ?EPOCH, lastSuccess => ?EPOCH} + end). + +% update dperlNodeJobDyn table according to planned action, reset statistics +-spec set_cycle_state(scrCycle(), jobName(), start | stop) -> ok. +set_cycle_state(Cycle, Job, Action) -> set_cycle_state(Cycle, Job, Action, #{}). + +% update dperlNodeJobDyn table according to planned action, update statistics in +-spec set_cycle_state(scrCycle(), jobName(), start | stop, jobArgs()) -> ok. +set_cycle_state(Cycle, Job, Action, Stats0) + when (Cycle==cleanup orelse Cycle==refresh) andalso + (Action==start orelse Action==stop) andalso is_binary(Job) -> + {NodeJobDyn, CycleState1} = case dperl_dal:select(?JOBDYN_TABLE, + [{#dperlNodeJobDyn{name=Job, _='_'}, [], ['$_']}]) of + {[#dperlNodeJobDyn{state=#{Cycle:=CycleState0}} = NJD], true} when is_map(CycleState0) -> + {NJD, CycleState0}; {[#dperlNodeJobDyn{} = NJD], true} -> - {NJD, #{lastAttempt => os:timestamp(), - lastSuccess => ?EPOCH}} + {NJD, #{lastAttempt=>os:timestamp(), lastSuccess=>?EPOCH}} % ToDo: Should it be imem_meta:time() ???? 
+ end, + {CycleState2, Stats1} = case maps:get(count, Stats0, '$not_found') of + '$not_found' -> {CycleState1, Stats0}; + Count -> {CycleState1#{count => Count}, maps:remove(count, Stats0)} + end, + CycleState3 = if + Action == start -> CycleState2#{lastAttempt => imem_meta:time()}; + true -> CycleState2#{lastSuccess => imem_meta:time()} end, - {NewStatus, State} = - case maps:get(count, State0, '$not_found') of - '$not_found' -> {NewStatus0, State0}; - Count -> - {NewStatus0#{count => Count}, maps:remove(count, State0)} + Status = case {Cycle, Action} of + {cleanup, start} -> cleaning; + {cleanup, stop} -> cleaned; + {refresh, start} -> refreshing; + {refresh, stop} -> refreshed end, - TypeState = case {Type, Status} of - {cleanup, start} -> cleaning; - {cleanup, stop} -> cleaned; - {refresh, start} -> refreshing; - {refresh, stop} -> refreshed - end, - % create 'Type' state if doesn't exists - % if exists update 'LastSuccess' timestamp to current time NodeJobDynState = NodeJobDyn#dperlNodeJobDyn.state, - dperl_dal:update_job_dyn( - Job, - maps:merge( - NodeJobDynState#{ - Type => - if Status == start -> - NewStatus#{lastAttempt => imem_meta:time()}; - true -> - NewStatus#{lastSuccess => imem_meta:time()} - end}, State), TypeState). - -% + NewStateWithStats = maps:merge(NodeJobDynState#{Cycle=>CycleState3}, Stats1), + dperl_dal:update_job_dyn(Job, NewStateWithStats, Status). + % default callbacks -% -should_cleanup(LastAttempt, LastSuccess, BatchInterval, Interval) -> + +-spec should_cleanup(ddTimestamp(), ddTimestamp(), + scrMsecInterval(), scrMsecInterval()) -> true | false. +should_cleanup(LastAttempt, LastSuccess, BatchInterval, CycleInterval) -> if LastAttempt > LastSuccess -> imem_datatype:msec_diff(LastAttempt) > BatchInterval; true -> - imem_datatype:msec_diff(LastSuccess) > Interval + imem_datatype:msec_diff(LastSuccess) > CycleInterval end. 
+-spec should_refresh(ddTimestamp(), ddTimestamp(), scrMsecInterval(), + scrMsecInterval(), scrHoursOfDay()) -> true | false. should_refresh(LastAttempt, LastSuccess, BatchInterval, Interval, Hours) -> if LastAttempt > LastSuccess -> imem_datatype:msec_diff(LastAttempt) > BatchInterval; @@ -512,6 +584,8 @@ should_refresh(LastAttempt, LastSuccess, BatchInterval, Interval, Hours) -> end end. + +-spec is_equal(scrAnyKey(), scrAnyVal(), scrAnyVal(), scrState()) -> boolean(). is_equal(_Key, S, S, _State) -> true; is_equal(_Key, S, D, _State) when is_map(S), is_map(D) -> dperl_dal:normalize_map(S) == dperl_dal:normalize_map(D); @@ -519,37 +593,38 @@ is_equal(_Key, S, D, _State) when is_list(S), is_list(D) -> lists:sort(S) == lists:sort(D); is_equal(_Key, _, _, _State) -> false. +-spec cleanup_log(string(), scrAnyKeys()) -> no_op | ok. cleanup_log(_Msg, []) -> no_op; cleanup_log(Msg, [K | _] = Keys) when is_integer(K) -> ?JWarn("~s (~p) ~w", [Msg, length(Keys), Keys]); cleanup_log(Msg, Keys) -> ?JWarn("~s (~p) ~p", [Msg, length(Keys), Keys]). +-spec sync_log(scrOperation(), scrAnyKey() | {scrAnyKey(),term()}, boolean()) -> no_op | ok. sync_log(_, _, false) -> no_op; sync_log(Msg, {Key, _}, ShouldLog) -> sync_log(Msg, Key, ShouldLog); sync_log(Msg, Key, _) when is_binary(Key) -> ?JInfo("~s : ~s", [Msg, Key]); sync_log(Msg, Key, _) -> ?JInfo("~s : ~p", [Msg, Key]). -%%---------------- %% chunked cleanup -%% +-spec process_events(scrAnyKeys(), jobModule(), scrState()) -> {boolean(), scrState()}. process_events(Keys, Mod, State) -> - ShouldLog = - case erlang:function_exported(Mod, should_sync_log, 1) of + ShouldLog = case erlang:function_exported(Mod, should_sync_log, 1) of true -> Mod:should_sync_log(State); false -> true end, process_events(Keys, Mod, State, ShouldLog, false). +-spec process_events(scrAnyKeys(), jobModule(), scrState(), boolean(), boolean()) -> + {boolean(), scrState()}. 
process_events([], Mod, State, _ShouldLog, IsError) -> case erlang:function_exported(Mod, finalize_src_events, 1) of true -> execute_prov_fun(no_log, Mod, finalize_src_events, [State], false, IsError); false -> {IsError, State} end; process_events([Key | Keys], Mod, State, ShouldLog, IsError) -> - {NewIsError, NewState} = - case {Mod:fetch_src(Key, State), Mod:fetch_dst(Key, State)} of + {NewIsError, NewState} = case {Mod:fetch_src(Key, State), Mod:fetch_dst(Key, State)} of {S, S} -> Mod:report_status(Key, no_op, State), {IsError, State}; %% nothing to do @@ -584,8 +659,7 @@ process_events([Key | Keys], Mod, State, ShouldLog, IsError) -> Mod:report_status(Key, {error, Error}, State1), {true, State1}; {S, D} -> - DiffFun = - case erlang:function_exported(Mod, is_equal, 4) of + DiffFun = case erlang:function_exported(Mod, is_equal, 4) of true -> fun Mod:is_equal/4; false -> fun is_equal/4 end, @@ -598,6 +672,8 @@ process_events([Key | Keys], Mod, State, ShouldLog, IsError) -> end, process_events(Keys, Mod, NewState, ShouldLog, NewIsError). +-spec execute_prov_fun(scrOperation(), jobModule(), atom(), list(), boolean(), boolean()) -> + {true | false | idle, scrState() | term()}. execute_prov_fun(Op, Mod, Fun, Args, ShouldLog, IsError) -> case catch apply(Mod, Fun, Args) of {false, NewState} -> @@ -616,20 +692,21 @@ execute_prov_fun(Op, Mod, Fun, Args, ShouldLog, IsError) -> {true, lists:last(Args)} end. +-spec execute_prov_fun(scrOperation(), jobModule(), atom(), list(), boolean(), boolean(), check) -> + {true|false|idle, scrState()|term()}. execute_prov_fun(Op, Mod, Fun, Args, ShouldLog, IsError, check) -> case erlang:function_exported(Mod, Fun, length(Args)) of - true -> execute_prov_fun(Op, Mod, Fun, Args, ShouldLog, IsError); + true -> + execute_prov_fun(Op, Mod, Fun, Args, ShouldLog, IsError); false -> ?Error("Function : ~p not exported in mod : ~p", [Fun, Mod]), {true, lists:last(Args)} end. 
--spec cleanup_refresh_collect(atom(), #cleanup_ctx{}, state()) -> - #{deletes => list(), inserts => list(), lastKey => any()}. -cleanup_refresh_collect(Mod, - #cleanup_ctx{minKey = MinKey, maxKey = MaxKey, - lastKey = LastKey, bulkCount = BulkCnt} = CleanupCtx, - State) -> +-spec cleanup_refresh_collect(jobModule(), #cleanup_ctx{}, scrState()) -> + #{deletes => scrAnyKeys(), inserts => scrAnyKeys(), lastKey => scrAnyKey()}. +cleanup_refresh_collect(Mod, CleanupCtx, State) -> + #cleanup_ctx{minKey=MinKey, maxKey=MaxKey, lastKey=LastKey, bulkCount=BulkCnt} = CleanupCtx, CurKey = if LastKey =< MinKey -> MinKey; % throw to cycle start if getting LastKey >= MaxKey -> MinKey; % out of key bounds by re-config @@ -657,67 +734,63 @@ cleanup_refresh_collect(Mod, srcKeys = SrcKeys, srcCount = length(SrcKeys), dstKeys = DstKeys, dstCount = length(DstKeys), lastKey = CurKey}), State4}. --spec cleanup_refresh_compare(#cleanup_ctx{}) -> #{deletes => list(), differences => list(), inserts => list(), lastKey => any()}. +-spec cleanup_refresh_compare(#cleanup_ctx{}) -> + #{deletes=>scrAnyKeys(), differences=>scrAnyKeys() + , inserts=>scrAnyKeys(), lastKey=>scrAnyKey()}. 
cleanup_refresh_compare(#cleanup_ctx{ - srcKeys = SrcKeys, dstKeys = [], deletes = Deletes, - inserts = Inserts, minKey = MinKey, differences = Diffs, - dstCount = DstCount, bulkCount = BulkCnt, srcCount = SrcCount}) - when DstCount < BulkCnt, SrcCount < BulkCnt -> + srcKeys=SrcKeys, dstKeys=[], deletes=Deletes, + inserts=Inserts, minKey=MinKey, differences=Diffs, + dstCount=DstCount, bulkCount=BulkCnt, srcCount=SrcCount}) + when DstCount < BulkCnt, SrcCount < BulkCnt -> Remaining = fetch_keys(SrcKeys), - #{deletes => Deletes, differences => Diffs, inserts => Inserts++Remaining, lastKey => MinKey}; -cleanup_refresh_compare(#cleanup_ctx{srcKeys = SrcKeys, dstKeys = [], deletes = Deletes, dstCount = DstCount, - inserts = Inserts, differences = Diffs}) - when DstCount == 0 -> + #{deletes=>Deletes, differences=>Diffs, inserts=>Inserts++Remaining, lastKey=>MinKey}; +cleanup_refresh_compare(#cleanup_ctx{srcKeys=SrcKeys, dstKeys=[], deletes=Deletes + , dstCount=DstCount, inserts=Inserts, differences=Diffs}) + when DstCount == 0 -> Remaining = fetch_keys(SrcKeys), - #{deletes => Deletes, differences => Diffs, inserts => Inserts++Remaining, lastKey => last_key(SrcKeys)}; -cleanup_refresh_compare(#cleanup_ctx{dstKeys = [], deletes = Deletes, differences = Diffs, - inserts = Inserts, lastKey = LK}) -> - #{deletes => Deletes, differences => Diffs, inserts => Inserts, lastKey => LK}; -cleanup_refresh_compare(#cleanup_ctx{ - srcCount = SrcCount, dstKeys = DstKeys, bulkCount = BulkCnt, - minKey = MinKey, srcKeys = [], deletes = Deletes, - inserts = Inserts, dstCount = DstCount, differences = Diffs}) - when SrcCount < BulkCnt, DstCount < BulkCnt -> + #{deletes=>Deletes, differences=>Diffs, inserts=>Inserts++Remaining, lastKey=>last_key(SrcKeys)}; +cleanup_refresh_compare(#cleanup_ctx{dstKeys=[], deletes=Deletes, differences=Diffs, + inserts=Inserts, lastKey=LK}) -> + #{deletes=>Deletes, differences=>Diffs, inserts=>Inserts, lastKey=>LK}; +cleanup_refresh_compare(#cleanup_ctx{ 
srcCount=SrcCount, dstKeys=DstKeys,bulkCount=BulkCnt + , minKey=MinKey, srcKeys=[], deletes=Deletes + , inserts=Inserts, dstCount=DstCount, differences=Diffs}) + when SrcCount < BulkCnt, DstCount < BulkCnt -> Remaining = fetch_keys(DstKeys), - #{deletes => Deletes++Remaining, differences => Diffs, inserts => Inserts, lastKey => MinKey}; -cleanup_refresh_compare(#cleanup_ctx{srcKeys = [], deletes = Deletes, - inserts = Inserts, dstKeys = DstKeys, - differences = Diffs, srcCount = SrcCount}) - when SrcCount == 0 -> + #{deletes=>Deletes++Remaining, differences=>Diffs, inserts=>Inserts, lastKey=>MinKey}; +cleanup_refresh_compare(#cleanup_ctx{ srcKeys=[], deletes=Deletes, inserts=Inserts + , dstKeys=DstKeys, differences=Diffs, srcCount=SrcCount}) + when SrcCount == 0 -> Remaining = fetch_keys(DstKeys), - #{deletes => Deletes ++ Remaining, differences => Diffs, inserts => Inserts, lastKey => last_key(DstKeys)}; -cleanup_refresh_compare(#cleanup_ctx{srcKeys = [], deletes = Deletes, differences = Diffs, - inserts = Inserts, lastKey = LK}) -> - #{deletes => Deletes, differences => Diffs, inserts => Inserts, lastKey => LK}; -cleanup_refresh_compare(#cleanup_ctx{srcKeys = [K|SrcKeys], dstKeys = [K|DstKeys]} - = CleanupCtx) -> - cleanup_refresh_compare(CleanupCtx#cleanup_ctx{srcKeys = SrcKeys, dstKeys = DstKeys, - lastKey = last_key([K])}); -cleanup_refresh_compare(#cleanup_ctx{srcKeys = [{K, _} | SrcKeys], dstKeys = [{K, _} | DstKeys], - differences = Diffs} = CleanupCtx) -> - cleanup_refresh_compare(CleanupCtx#cleanup_ctx{srcKeys = SrcKeys, dstKeys = DstKeys, - lastKey = K, differences = [K | Diffs]}); -cleanup_refresh_compare(#cleanup_ctx{srcKeys = [SK|SrcKeys], dstKeys = [DK | DstKeys], - inserts = Inserts, deletes = Deletes} = CleanupCtx) -> + #{deletes=>Deletes++Remaining, differences=>Diffs, inserts=>Inserts, lastKey=>last_key(DstKeys)}; +cleanup_refresh_compare(#cleanup_ctx{ srcKeys=[], deletes=Deletes, differences=Diffs + , inserts=Inserts, lastKey=LK}) -> + 
#{deletes=>Deletes, differences=>Diffs, inserts=>Inserts, lastKey=>LK}; +cleanup_refresh_compare(#cleanup_ctx{srcKeys=[K|SrcKeys], dstKeys=[K|DstKeys]} = CleanupCtx) -> + cleanup_refresh_compare(CleanupCtx#cleanup_ctx{ srcKeys=SrcKeys, dstKeys=DstKeys + , lastKey = last_key([K])}); +cleanup_refresh_compare(#cleanup_ctx{ srcKeys=[{K, _} | SrcKeys], dstKeys=[{K, _} | DstKeys] + , differences=Diffs} = CleanupCtx) -> + cleanup_refresh_compare(CleanupCtx#cleanup_ctx{ srcKeys=SrcKeys, dstKeys=DstKeys + , lastKey=K, differences=[K | Diffs]}); +cleanup_refresh_compare(#cleanup_ctx{ srcKeys=[SK|SrcKeys], dstKeys=[DK | DstKeys] + , inserts=Inserts, deletes=Deletes} = CleanupCtx) -> case {last_key([SK]), last_key([DK])} of {K1, K2} when K1 < K2 -> cleanup_refresh_compare( - CleanupCtx#cleanup_ctx{srcKeys = SrcKeys, - inserts = [K1|Inserts], lastKey = K1}); + CleanupCtx#cleanup_ctx{srcKeys=SrcKeys, inserts=[K1|Inserts], lastKey=K1}); {K1, K2} when K2 < K1 -> cleanup_refresh_compare( - CleanupCtx#cleanup_ctx{dstKeys = DstKeys, - deletes = [K2|Deletes], lastKey = K2}) + CleanupCtx#cleanup_ctx{dstKeys=DstKeys, deletes=[K2|Deletes], lastKey=K2}) end. +-spec fetch_keys(scrAnyKeys()) -> scrAnyKeys(). fetch_keys([]) -> []; fetch_keys([{_, _} | _] = KVs) -> [K || {K, _} <- KVs]; fetch_keys(Keys) -> Keys. +-spec last_key(scrAnyKeys()) -> scrAnyKey(). last_key([{_, _} | _] = KVs) -> element(1, lists:last(KVs)); last_key(Keys) -> lists:last(Keys). -%% ---------------------- - - %% ---------------------- %% Eunit Tests %% ---------------------- diff --git a/src/dperl/dperl_strategy_scr.hrl b/src/dperl/dperl_strategy_scr.hrl new file mode 100644 index 00000000..e5c6e746 --- /dev/null +++ b/src/dperl/dperl_strategy_scr.hrl @@ -0,0 +1,27 @@ +-ifndef(_dperl_STRATEGY_SCR_HRL_). +-define(_dperl_STRATEGY_SCR_HRL_, true). + +% transparently handled types in this behaviour module (defined in callback module) +-type scrState() :: any(). 
% processing state details defined in callback module +-type scrAnyKey() :: any(). % mostly scalars or lists of scalars +-type scrAnyKeys() :: [scrAnyKey()]. % list of changed/dirty/all keys +-type scrAnyVal() :: term(). % type decided by callback, usually map() +-type scrAnyKeyVal() :: {scrAnyKey(),scrAnyVal()}. % used for cleanup/refresh combined operations +-type scrAnyKeyVals() :: [scrAnyKeyVal()]. % used for cleanup/refresh combined operations + +% scr behaviour types +-type scrChannel() :: binary(). +-type scrDirection() :: push | pull. +-type scrCycle() :: cleanup | refresh. % stateful phases (sync is stateless) +-type scrCycleState() :: map(). % initially: #{lastAttempt => ?EPOCH, lastSuccess => ?EPOCH} +-type scrPhase() :: sync | scrCycle(). +-type scrBatchSize() :: integer(). +-type scrMessage() :: binary(). +-type scrStatus() :: no_op | {error,term()} | scrMessage(). +-type scrOperation() :: finalize_src_events | no_log | string(). % "Protected"|"Deleted"|"Inserted"|"Updated". +-type scrSoftError() :: true|false. + +-type scrMsecInterval() :: integer(). % delays in milli-seconds +-type scrHoursOfDay() :: [integer()]. % execute only on these hours, [] = any hour + +-endif. \ No newline at end of file diff --git a/src/dperl/jobs/dpjob_office_365.erl b/src/dperl/jobs/dpjob_office_365.erl index 04676c32..4c7ec228 100644 --- a/src/dperl/jobs/dpjob_office_365.erl +++ b/src/dperl/jobs/dpjob_office_365.erl @@ -1,10 +1,22 @@ -module(dpjob_office_365). -include("../dperl.hrl"). +-include("../dperl_strategy_scr.hrl"). -behavior(dperl_worker). -behavior(dperl_strategy_scr). - + +-type locKey() :: [string()]. % local key of a contact, e.g. ["contact","My","Ah2hA77a"] +-type locId() :: string(). % last item in locKey() is called local id e.g. "Ah2hA77a" +-type locVal() :: map(). % local cvalue of a contact, converted to a map for processing +-type locBin() :: binary(). % local cvalue of a contact in binary form (often stored like that) +-type remKey() :: binary(). 
% remote key of a contact, called <<"id">> in Office365 +-type remKeys():: [remKey()]. % list of remKey() type (e.g. DirtyKeys) +-type remVal() :: map(). % remote value of a contact (relevant fields only) +-type remBin() :: binary(). % remote value of a contract in raw binary JSON form +-type meta() :: map(). % contact meta information with respect to this remote cloud + + -define(OAUTH2_CONFIG(__JOB_NAME), ?GET_CONFIG(oAuth2Config, [__JOB_NAME], @@ -83,23 +95,30 @@ % contacts graph api % https://docs.microsoft.com/en-us/graph/api/resources/contact?view=graph-rest-1.0 --record(state, { name :: binary() - , type = pull :: pull|push - , channel :: binary() - , keyPrefix :: list() - , tokenPrefix :: list() - , token :: map() - , apiUrl :: binary() - , fetchUrl :: binary() - , contacts = [] :: list() - , clContacts = [] :: list() - , isConnected = true :: boolean() - , isFirstSync = true :: boolean() - , isCleanupFinished = true :: boolean() - , auditStartTime = {0,0} :: tuple() - , template = ?NOT_FOUND :: ?NOT_FOUND|map() - , accountId :: system|integer() - }). +%% remote item (single contact info in cache) +-record(remItem, { remKey :: remKey() % remote key (id) + , meta :: meta() % META information (id, ...) + , content :: remVal() % relevant contact info to be synced + }). 
+ +%% scr processing state +-record(state, { name :: jobName() + , type = pull :: scrDirection() + , channel :: scrChannel() % channel name + , keyPrefix :: locKey() % key space prefix in channel + , tokenPrefix :: locKey() % without id #token# + , token :: map() % token info as stored under #token# + , apiUrl :: binary() + , fetchUrl :: binary() + , dirtyKeys = [] :: remKeys() % needing insert/update/delete + , remItems = [] :: list(#remItem{}) % cache for cleanup / ToDo: remove + , isConnected = true :: boolean() + , isFirstSync = true :: boolean() + , isCleanupFinished = true :: boolean() + , auditStartTime = {0,0} :: ddTimestamp() % UTC timestamp {Sec,MicroSec} + , template = ?NOT_FOUND :: ?NOT_FOUND|map() % empty contact with default values + , accountId :: ddEntityId() % data owner + }). % dperl_strategy_scr export -export([ connect_check_src/1 @@ -138,29 +157,34 @@ get_key_prefix() -> ?KEY_PREFIX(<<>>). get_key_prefix(JobName) -> ?KEY_PREFIX(JobName). -% determine the contact id (last piece of cekey) from ckey or from the remote id +% determine the local id as the last piece of ckey (if available) +% or hash of remote id (if new to local store) % this id is a string representing a hash of the remote id --spec local_contact_id(list()|binary()) -> string(). -local_contact_id(Key) when is_list(Key) -> - lists:last(Key); -local_contact_id(Bin) when is_binary(Bin) -> - io_lib:format("~.36B",[erlang:phash2(Bin)]). +-spec local_id(locKey()) -> locId(). +local_id(Key) when is_list(Key) -> lists:last(Key). + + +% calculate a new local id as a string representing the hash of the remote key (id) +-spec new_local_id(remKey()) -> locKey(). +new_local_id(RemKey) when is_binary(RemKey) -> io_lib:format("~.36B",[erlang:phash2(RemKey)]). 
% convert list of remote values (already maps) to list of {Key,RemoteId,RemoteValue} triples % which serves as a lookup buffer of the complete remote state, avoiding sorting issues +-spec format_remote_values_to_kv(remVal(), locKey(), jobName()) -> remVal(). format_remote_values_to_kv(Values, KeyPrefix, JobName) -> format_remote_values_to_kv(Values, KeyPrefix, JobName, []). format_remote_values_to_kv([], _KeyPrefix, _JobName, Acc) -> Acc; format_remote_values_to_kv([Value|Values], KeyPrefix, JobName, Acc) -> #{<<"id">> := RemoteId} = Value, - Key = KeyPrefix ++ [local_contact_id(RemoteId)], + Key = KeyPrefix ++ [new_local_id(RemoteId)], format_remote_values_to_kv(Values, KeyPrefix, JobName, [{Key,RemoteId,format_value(Value, JobName)}|Acc]). % format remote or local value by projecting it down to configured list of synced (meta + content) attributes format_value(Value, JobName) when is_map(Value) -> maps:with(?META_ATTRIBUTES(JobName)++?CONTENT_ATTRIBUTES(JobName), Value). +-spec connect_check_src(#state{}) -> {ok,#state{}} | {error,any()} | {error,any(), #state{}}. connect_check_src(#state{isConnected=true} = State) -> {ok, State}; connect_check_src(#state{isConnected=false, accountId=AccountId, tokenPrefix=TokenPrefix} = State) -> @@ -174,6 +198,8 @@ connect_check_src(#state{isConnected=false, accountId=AccountId, tokenPrefix=Tok {error, Error, State} end. +-spec get_source_events(#state{}, scrBatchSize()) -> + {ok,remKeys(),#state{}} | {ok,sync_complete,#state{}}. 
% {error,scrAnyKey()} get_source_events(#state{auditStartTime=LastStartTime, type=push, channel=Channel, isFirstSync=IsFirstSync} = State, BulkSize) -> case dperl_dal:read_audit_keys(Channel, LastStartTime, BulkSize) of @@ -191,27 +217,30 @@ get_source_events(#state{auditStartTime=LastStartTime, type=push, UniqueKeys = lists:delete(undefined, lists:usort(Keys)), {ok, UniqueKeys, State#state{auditStartTime=NextStartTime}} end; -get_source_events(#state{contacts=[]} = State, _BulkSize) -> +get_source_events(#state{dirtyKeys=[]} = State, _BulkSize) -> {ok, sync_complete, State}; -get_source_events(#state{contacts=Contacts} = State, _BulkSize) -> - ?Info("get_source_events result count ~p~n~p",[length(Contacts), hd(Contacts)]), - {ok, Contacts, State#state{contacts=[]}}. +get_source_events(#state{dirtyKeys=DirtyKeys} = State, _BulkSize) -> + ?Info("get_source_events result count ~p~n~p",[length(DirtyKeys), hd(DirtyKeys)]), + {ok, DirtyKeys, State#state{dirtyKeys=[]}}. +- spec connect_check_dst(#state{}) -> {ok, #state{}}. % {error,any()} | {error,any(),#state{}} connect_check_dst(State) -> {ok, State}. % Question: Why defaulted for push destination? -do_refresh(_State, _BulkSize) -> {error, cleanup_only}. +do_refresh(_State, _BulkSize) -> {error, cleanup_only}. % using cleanup/refresh combined +-spec fetch_src(remKey(), #state{}) -> ?NOT_FOUND | locVal() | remVal(). fetch_src(Key, #state{channel=Channel, type=push}) -> dperl_dal:read_channel(Channel, Key); -fetch_src(Key, #state{clContacts=ClContacts, type=pull}) -> - case lists:keyfind(Key, 1, ClContacts) of +fetch_src(Key, #state{remItems=RemItems, type=pull}) -> + case lists:keyfind(Key, 1, RemItems) of {Key, _RemoteId, Value} -> Value; false -> ?NOT_FOUND end. -fetch_dst(Key, #state{name=Name, clContacts=ClContacts, type=push, - apiUrl=ApiUrl, token=Token} = State) -> - case lists:keyfind(Key, 1, ClContacts) of +-spec fetch_dst(remKey(), #state{}) -> ?NOT_FOUND | locVal() | remVal(). 
+fetch_dst(Key, #state{ name=Name, remItems=RemItems, type=push + , apiUrl=ApiUrl, token=Token} = State) -> + case lists:keyfind(Key, 1, RemItems) of {Key, RemoteId, _Value} -> ContactUrl = erlang:iolist_to_binary([ApiUrl, RemoteId]), case exec_req(ContactUrl, Token) of @@ -226,6 +255,7 @@ fetch_dst(Key, #state{name=Name, clContacts=ClContacts, type=push, fetch_dst(Key, #state{channel=Channel}) -> dperl_dal:read_channel(Channel, Key). +-spec insert_dst(remKey(), remVal()|locVal(), #state{}) -> {error, any()}. insert_dst(Key, Value, #state{type=push, apiUrl=ApiUrl, token=Token} = State) -> case exec_req(ApiUrl, Token, Value, post) of #{<<"id">> := _} = RemoteValue -> merge_meta_to_local(Key, RemoteValue, State); @@ -258,9 +288,10 @@ merge_meta_to_local(Key, RemoteValue, #state{channel=Channel, tokenPrefix=TokenP access_id(TokenPrefix) -> list_to_binary(string:join(TokenPrefix,"/")). -delete_dst(Key, #state{channel=Channel, type=push, clContacts=ClContacts, +-spec delete_dst(remKey(), #state{}) -> {scrSoftError(), #state{}}. +delete_dst(Key, #state{channel=Channel, type=push, remItems=RemItems, apiUrl=ApiUrl, token=Token} = State) -> - case lists:keyfind(Key, 1, ClContacts) of + case lists:keyfind(Key, 1, RemItems) of {Key, RemoteId, _Value} -> ContactUrl = erlang:iolist_to_binary([ApiUrl, RemoteId]), case exec_req(ContactUrl, Token, #{}, delete) of @@ -280,10 +311,10 @@ delete_dst(Key, #state{channel=Channel} = State) -> dperl_dal:remove_from_channel(Channel, Key), {false, State}. --spec update_dst(Key::list(), Value::map(), #state{}) -> tuple(). +-spec update_dst(remKey(), remVal()|locVal(), #state{}) -> {scrSoftError(), #state{}}. 
update_dst(Key, Value, #state{name=Name, channel=Channel, type=push, - clContacts=ClContacts, apiUrl=ApiUrl, token=Token} = State) -> - case lists:keyfind(Key, 1, ClContacts) of + remItems=RemItems, apiUrl=ApiUrl, token=Token} = State) -> + case lists:keyfind(Key, 1, RemItems) of {Key, RemoteId, _Value} -> ContactUrl = erlang:iolist_to_binary([ApiUrl, RemoteId]), case exec_req(ContactUrl, Token, Value, patch) of @@ -323,16 +354,15 @@ load_src_after_key(CurKey, BlkCount, #state{type=pull, fetchUrl=undefined, apiUr load_src_after_key(CurKey, BlkCount, State#state{fetchUrl=ContactsUrl}); load_src_after_key(CurKey, BlkCount, #state{name=Name, type=pull, isCleanupFinished=true, keyPrefix=KeyPrefix, token=Token, fetchUrl=FetchUrl} = State) -> - % fetch all contacts case fetch_all_contacts(FetchUrl, Token, KeyPrefix, Name) of {ok, Contacts} -> - load_src_after_key(CurKey, BlkCount, State#state{clContacts=Contacts, isCleanupFinished=false}); + load_src_after_key(CurKey, BlkCount, State#state{remItems=Contacts, isCleanupFinished=false}); {error, unauthorized} -> reconnect_exec(State, load_src_after_key, [CurKey, BlkCount]); {error, Error} -> {error, Error, State} end; -load_src_after_key(CurKey, BlkCount, #state{type=pull, clContacts=Contacts} = State) -> +load_src_after_key(CurKey, BlkCount, #state{type=pull, remItems=Contacts} = State) -> {ok, get_contacts_gt(CurKey, BlkCount, Contacts), State}. reconnect_exec(State, Fun, Args) -> @@ -343,10 +373,11 @@ reconnect_exec(State, Fun, Args) -> {error, Error, State1} end. +-spec do_cleanup(remKeys(), remKeys(), remKeys(), boolean(), #state{}) -> {ok, #state{}}. 
do_cleanup(_Deletes, _Inserts, _Diffs, _IsFinished, #state{type = push}) -> {error, <<"cleanup only for pull job">>}; do_cleanup(Deletes, Inserts, Diffs, IsFinished, State) -> - NewState = State#state{contacts = Inserts ++ Diffs ++ Deletes}, + NewState = State#state{dirtyKeys=Inserts++Diffs++Deletes}, if IsFinished -> {ok, finish, NewState#state{isCleanupFinished=true}}; true -> {ok, NewState} end. From 5605081277a0023d4bd619fa51a50fc89ba89de7 Mon Sep 17 00:00:00 2001 From: shamis Date: Sun, 14 Jun 2020 18:02:41 +0200 Subject: [PATCH 58/72] dpjob_ouraring fetch updated --- src/dderl.hrl | 4 ++-- src/dderl_session.erl | 6 +++--- src/dperl/jobs/dpjob_ouraring_crawl.erl | 16 +++++++++++++++- 3 files changed, 20 insertions(+), 6 deletions(-) diff --git a/src/dderl.hrl b/src/dderl.hrl index b503834c..00905631 100644 --- a/src/dderl.hrl +++ b/src/dderl.hrl @@ -325,8 +325,8 @@ % OAUTH --define(SYNC_OURARING, dperl_ouraring_crawl). +-define(SYNC_OURARING, dpjob_ouraring_crawl). --define(SYNC_OFFICE365, dperl_office_365). +-define(SYNC_OFFICE365, dpjob_office_365). -endif. 
diff --git a/src/dderl_session.erl b/src/dderl_session.erl index e34272e6..e07b87ca 100644 --- a/src/dderl_session.erl +++ b/src/dderl_session.erl @@ -382,16 +382,16 @@ process_call({[<<"about">>], _ReqData}, _Adapter, From, {SrcIp,_}, State) -> process_call({[<<"office_365_auth_config">>], _ReqData}, _Adapter, From, {SrcIp, _}, State) -> act_log(From, ?CMD_NOARGS, #{src => SrcIp, cmd => "office_365_auth_config"}, State), - AuthConfig = dperl_office_365:get_auth_config(), % ToDo: may depend on JobName or TokenPrefix + AuthConfig = dpjob_office_365:get_auth_config(), % ToDo: may depend on JobName or TokenPrefix Url = dderl_oauth:get_authorize_url(State#state.xsrf_token, AuthConfig, ?SYNC_OFFICE365), reply(From, #{<<"office_365_auth_config">> => #{<<"url">> => Url}}, self()), State; process_call({[<<"oura_ring_auth_config">>], _ReqData}, _Adapter, From, {SrcIp, _}, State) -> act_log(From, ?CMD_NOARGS, #{src => SrcIp, cmd => "oura_ring_auth_config"}, State), - AuthConfig = dperl_ouraring_crawl:get_auth_config(), % ToDo: may depend on JobName or TokenPrefix - ?Info("oura_ring_auth_config ~p",[AuthConfig]), + AuthConfig = dpjob_ouraring_crawl:get_auth_config(), % ToDo: may depend on JobName or TokenPrefix Url = dderl_oauth:get_authorize_url(State#state.xsrf_token, AuthConfig, ?SYNC_OURARING), + ?Info("oura_ring_auth_config ~p ~p",[AuthConfig, Url]), reply(From, #{<<"oura_ring_auth_config">> => #{<<"url">> => Url}}, self()), State; diff --git a/src/dperl/jobs/dpjob_ouraring_crawl.erl b/src/dperl/jobs/dpjob_ouraring_crawl.erl index 3a8bd0f1..b352c4ac 100644 --- a/src/dperl/jobs/dpjob_ouraring_crawl.erl +++ b/src/dperl/jobs/dpjob_ouraring_crawl.erl @@ -14,6 +14,14 @@ scope => "email personal daily"}, "Oura Ring auth config")). +-define(OAUTH2_TOKEN_KEY_PREFIX(__JOB_NAME), + ?GET_CONFIG(oAuth2TokenKeyPrefix, + [__JOB_NAME], + ["dperlJob","OuraRing"], + "Default KeyPrefix for OuraRing token cache" + ) + ). 
+ -define(KEY_PREFIX(__JOB_NAME), ?GET_CONFIG(keyPrefix, [__JOB_NAME], ["healthDevice","OuraRing"], "Default KeyPrefix for Oura Ring data") @@ -52,6 +60,8 @@ , get_auth_config/1 , get_key_prefix/0 , get_key_prefix/1 + , get_auth_token_key_prefix/0 + , get_auth_token_key_prefix/1 ]). % dperl_strategy_scr export @@ -76,6 +86,10 @@ get_key_prefix() -> ?KEY_PREFIX(<<>>). get_key_prefix(JobName) -> ?KEY_PREFIX(JobName). +get_auth_token_key_prefix() -> ?OAUTH2_TOKEN_KEY_PREFIX(<<>>). + +get_auth_token_key_prefix(JobName) -> ?OAUTH2_TOKEN_KEY_PREFIX(JobName). + connect_check_src(#state{is_connected = true} = State) -> {ok, State}; connect_check_src(#state{is_connected=false, accountId=AccountId, key_prefix=KeyPrefix} = State) -> @@ -161,7 +175,7 @@ init({#dperlJob{name=Name, dstArgs = #{channel := Channel} = DstArgs, ?JInfo("Starting with ~p's enchash...", [AccountId]), imem_enc_mnesia:put_enc_hash(EncHash), KeyPrefix = maps:get(key_prefix, DstArgs, get_key_prefix(Name)), - case dderl_oauth:get_token_info(AccountId, KeyPrefix, ?SYNC_OURARING) of + case dderl_oauth:get_token_info(AccountId, get_auth_token_key_prefix(Name), ?SYNC_OURARING) of #{<<"access_token">> := AccessToken} -> ChannelBin = dperl_dal:to_binary(Channel), dperl_dal:create_check_channel(ChannelBin), From 0e4c55c30e900a82c0d7aef07d162ef008ea7dd7 Mon Sep 17 00:00:00 2001 From: shamis Date: Sun, 14 Jun 2020 18:32:18 +0200 Subject: [PATCH 59/72] replaced os:timestamp --- src/dderl_resource.erl | 4 ++-- src/dderl_saml_handler.erl | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/src/dderl_resource.erl b/src/dderl_resource.erl index e92f316f..47003359 100644 --- a/src/dderl_resource.erl +++ b/src/dderl_resource.erl @@ -16,7 +16,7 @@ -record(state, {sessionToken}). 
init(Req0, []) -> - Req = ?COW_REQ_SET_META(reqTime, os:timestamp(), Req0), + Req = ?COW_REQ_SET_META(reqTime, imem_meta:time(), Req0), Req1 = ?COW_REQ_SET_META(accessLog, #{}, Req), ?Debug("Request : ~p", [Req1]), case cowboy_req:has_body(Req1) of @@ -166,7 +166,7 @@ terminate(_Reason, Req, _State) -> RespSize = ?COW_REQ_GET_META(respSize, Req, 0), ReqSize = cowboy_req:body_length(Req), Size = ReqSize + RespSize, - ProcessingTimeMicroS = timer:now_diff(os:timestamp(), ReqTime), + ProcessingTimeMicroS = imem_datatype:musec_diff(ReqTime), catch dderl_access_logger:log(Log#{bytes => Size, time => ProcessingTimeMicroS}). diff --git a/src/dderl_saml_handler.erl b/src/dderl_saml_handler.erl index cc2c2c35..290028eb 100644 --- a/src/dderl_saml_handler.erl +++ b/src/dderl_saml_handler.erl @@ -20,7 +20,7 @@ init(Req0, Args) -> #{app := Application} -> Application; _ -> dderl end, - Req = ?COW_REQ_SET_META(App, reqTime, os:timestamp(), Req0), + Req = ?COW_REQ_SET_META(App, reqTime, imem_meta:time(), Req0), Req1 = ?COW_REQ_SET_META(App, accessLog, #{}, Req), HostUrl = iolist_to_binary(cowboy_req:uri(Req1, #{path => undefined, qs => undefined})), Url = iolist_to_binary(cowboy_req:uri(Req1)), From 9e9026ea453107de926f714b022eec3c913b96ea Mon Sep 17 00:00:00 2001 From: shamis Date: Sun, 14 Jun 2020 18:40:55 +0200 Subject: [PATCH 60/72] updated the comments --- src/dperl/dperl_strategy_scr.erl | 26 +++++++++++--------------- src/dperl/dperl_strategy_scr.hrl | 4 +++- 2 files changed, 14 insertions(+), 16 deletions(-) diff --git a/src/dperl/dperl_strategy_scr.erl b/src/dperl/dperl_strategy_scr.erl index 7ef33736..10dcca99 100644 --- a/src/dperl/dperl_strategy_scr.erl +++ b/src/dperl/dperl_strategy_scr.erl @@ -32,18 +32,16 @@ -callback fetch_dst(scrAnyKey(), scrState()) -> ?NOT_FOUND | scrAnyVal(). % delete one item from destination (if it exists) -% first element in result is true if a delete was unnecessary ???? 
+% first element in result is true if a delete was unnecessary +% callback is responsible for deciding if no_op delete is an error or not -callback delete_dst(scrAnyKey(), scrState()) -> {scrSoftError(), scrState()}. % insert one item to destination (which is assumed to not exist) -% first element in result is true if the insert was compatible but unnecessary (already -% existed in correct shape) ???? -% alternatively, the callback implementation can (or should) throw if key already exists +% if the callback is able to do an update instead of an insert it can do it responsibly. -callback insert_dst(scrAnyKey(), scrAnyVal(), scrState()) -> {scrSoftError(), scrState()}. % update one item in destination (which is assumed to exist) -% first element in result tuple indicates wether an update was possible (false=insert needed) ???? -% alternatively, the callback implementation can (or should) throw if the key does not exist +% if the callback is able to do an insert instead of an update it can do it responsibly. -callback update_dst(scrAnyKey(), scrAnyVal(), scrState()) -> {scrSoftError(), scrState()}. % allow the callback implementation act upon an error or warning message @@ -74,17 +72,16 @@ -callback is_equal(scrAnyKey(), scrAnyVal(), scrAnyVal(), scrState()) -> true | false. % override for destination channel insert/update/delete (final data change) -% first element of result indicates if anything was changed on the target ???? -% (ignored by behaviour, used for debugging only) +% only used for protected configurations (reversing the direction of data flow) -callback update_channel( scrAnyKey(), IsSamePlatform::boolean(), SourceVal::scrAnyVal(), DestVal::scrAnyVal(), scrState()) -> {true, scrState()} | {false, scrState()}. -% optional cleanup call after end of differential provisioning (sync phase) ???? -% where and when is it exactly invoked ???? -% first element of result indicates if anything was executed on the target ???? 
+% called at the end of all the sync cycles, for example used for smsc push job +% to push the addresslists. +% optional callback when there is a risk of target inconsistency after individual sync updates. -callback finalize_src_events(scrState()) -> {true, scrState()} | {false, scrState()}. % can be used in callback to suppress the logging of individual sync results @@ -265,8 +262,7 @@ execute(sync, Mod, Job, State, #{sync:=Sync} = Args) -> execute(finish, Mod, Job, State4, Args); %% idle used for dperl_mec_ic to have idle timeout on %% try later error from oracle - %% would be removed in the future when new - %% behavior is used for mec_ic + %% results in calling idle wait instead of always wait {idle, State4} -> execute(idle, Mod, Job, State4, Args) end @@ -533,7 +529,7 @@ set_cycle_state(Cycle, Job, Action, Stats0) {[#dperlNodeJobDyn{state=#{Cycle:=CycleState0}} = NJD], true} when is_map(CycleState0) -> {NJD, CycleState0}; {[#dperlNodeJobDyn{} = NJD], true} -> - {NJD, #{lastAttempt=>os:timestamp(), lastSuccess=>?EPOCH}} % ToDo: Should it be imem_meta:time() ???? + {NJD, #{lastAttempt=>imem_meta:time(), lastSuccess=>?EPOCH}} % ToDo: Should it be imem_meta:time() ???? end, {CycleState2, Stats1} = case maps:get(count, Stats0, '$not_found') of '$not_found' -> {CycleState1, Stats0}; @@ -673,7 +669,7 @@ process_events([Key | Keys], Mod, State, ShouldLog, IsError) -> process_events(Keys, Mod, NewState, ShouldLog, NewIsError). -spec execute_prov_fun(scrOperation(), jobModule(), atom(), list(), boolean(), boolean()) -> - {true | false | idle, scrState() | term()}. + {scrSoftError(), scrState() | term()}. execute_prov_fun(Op, Mod, Fun, Args, ShouldLog, IsError) -> case catch apply(Mod, Fun, Args) of {false, NewState} -> diff --git a/src/dperl/dperl_strategy_scr.hrl b/src/dperl/dperl_strategy_scr.hrl index e5c6e746..8cd95697 100644 --- a/src/dperl/dperl_strategy_scr.hrl +++ b/src/dperl/dperl_strategy_scr.hrl @@ -19,7 +19,9 @@ -type scrMessage() :: binary(). 
-type scrStatus() :: no_op | {error,term()} | scrMessage(). -type scrOperation() :: finalize_src_events | no_log | string(). % "Protected"|"Deleted"|"Inserted"|"Updated". --type scrSoftError() :: true|false. +-type scrSoftError() :: true|false|idle. % idle only used in special case where the dst is not ready and we have to do an idle wait. + % true signifies the one or more events in the sync cycle resulted in an error + % false means all the events were processed successfully by the sync cycle -type scrMsecInterval() :: integer(). % delays in milli-seconds -type scrHoursOfDay() :: [integer()]. % execute only on these hours, [] = any hour From 9237ac0b0f0ed5f3c7d10b8165cb7520e186a9af Mon Sep 17 00:00:00 2001 From: stoch Date: Thu, 18 Jun 2020 15:14:57 +0200 Subject: [PATCH 61/72] fixing type bug for AccountId in auth registration --- src/dderl_dal.erl | 2 +- src/dperl/jobs/dpjob_office_365.erl | 4 ++-- src/dperl/jobs/dpjob_ouraring_crawl.erl | 4 ++-- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/src/dderl_dal.erl b/src/dderl_dal.erl index 350cddbf..2091d2b5 100644 --- a/src/dderl_dal.erl +++ b/src/dderl_dal.erl @@ -871,7 +871,7 @@ create_check_avatar_channel(AccountId) -> imem_dal_skvh:create_check_channel(avatar_channel_name(AccountId), ?GET_AVATAR_CHANNEL_OPTIONS). write_to_avatar_channel(AccountId, Key, Value) -> - imem_dal_skvh:write(AccountId, avatar_channel_name(AccountId), Key, Value). + imem_dal_skvh:write(AccountId, AccountId, Key, Value). read_from_avatar_channel(AccountId, Key) -> case imem_dal_skvh:read(AccountId, avatar_channel_name(AccountId), [Key]) of diff --git a/src/dperl/jobs/dpjob_office_365.erl b/src/dperl/jobs/dpjob_office_365.erl index 4c7ec228..b6eff30a 100644 --- a/src/dperl/jobs/dpjob_office_365.erl +++ b/src/dperl/jobs/dpjob_office_365.erl @@ -33,9 +33,9 @@ ). 
-define(OAUTH2_TOKEN_KEY_PREFIX(__JOB_NAME), - ?GET_CONFIG(oAuth2TokenKeyPrefix, + ?GET_CONFIG(oAuth2KeyPrefix, [__JOB_NAME], - ["dperlJob","Office365"], + ["dpjob","Office365"], "Default KeyPrefix for Office365 token cache" ) ). diff --git a/src/dperl/jobs/dpjob_ouraring_crawl.erl b/src/dperl/jobs/dpjob_ouraring_crawl.erl index b352c4ac..3fdbd7ed 100644 --- a/src/dperl/jobs/dpjob_ouraring_crawl.erl +++ b/src/dperl/jobs/dpjob_ouraring_crawl.erl @@ -15,9 +15,9 @@ "Oura Ring auth config")). -define(OAUTH2_TOKEN_KEY_PREFIX(__JOB_NAME), - ?GET_CONFIG(oAuth2TokenKeyPrefix, + ?GET_CONFIG(oAuth2KeyPrefix, [__JOB_NAME], - ["dperlJob","OuraRing"], + ["dpjob","OuraRing"], "Default KeyPrefix for OuraRing token cache" ) ). From 6fc6fb35fde9555c1610e86f44b504c2e50792ec Mon Sep 17 00:00:00 2001 From: stoch Date: Mon, 22 Jun 2020 14:42:28 +0200 Subject: [PATCH 62/72] spec and doc dperl_strategy_scr and dpjob_ouraring_crawl --- src/dderl_dal.erl | 2 +- src/dderl_oauth.erl | 26 +- src/dperl/dperl.hrl | 2 +- src/dperl/dperl_dal.erl | 89 +++++-- src/dperl/dperl_strategy_scr.erl | 59 +++-- src/dperl/jobs/dpjob_office_365.erl | 36 +-- src/dperl/jobs/dpjob_ouraring_crawl.erl | 325 ++++++++++++++---------- 7 files changed, 337 insertions(+), 202 deletions(-) diff --git a/src/dderl_dal.erl b/src/dderl_dal.erl index 2091d2b5..350cddbf 100644 --- a/src/dderl_dal.erl +++ b/src/dderl_dal.erl @@ -871,7 +871,7 @@ create_check_avatar_channel(AccountId) -> imem_dal_skvh:create_check_channel(avatar_channel_name(AccountId), ?GET_AVATAR_CHANNEL_OPTIONS). write_to_avatar_channel(AccountId, Key, Value) -> - imem_dal_skvh:write(AccountId, AccountId, Key, Value). + imem_dal_skvh:write(AccountId, avatar_channel_name(AccountId), Key, Value). 
read_from_avatar_channel(AccountId, Key) -> case imem_dal_skvh:read(AccountId, avatar_channel_name(AccountId), [Key]) of diff --git a/src/dderl_oauth.erl b/src/dderl_oauth.erl index 03676ee9..d9bef261 100644 --- a/src/dderl_oauth.erl +++ b/src/dderl_oauth.erl @@ -28,6 +28,10 @@ get_authorize_url(XSRFToken, AuthConfig, SyncType) -> "scope" => {enc, Scope}, "state" => {enc, imem_json:encode(State)}}), erlang:iolist_to_binary([Url, "&", UrlParams]). + +%% get token info from web service using the configuration from callback module +%% store it in the avatar table of AccountId under the key TokenPrefix || "#token#" +-spec get_access_token(ddEntityId(), list(), string(), module()) -> ok | {error, term()}. get_access_token(AccountId, TokenPrefix, Code, SyncType) -> AuthConfig = try SyncType:get_auth_config() % ToDo: AuthConfig may depend on JobName or TokenPrefix @@ -36,7 +40,7 @@ get_access_token(AccountId, TokenPrefix, Code, SyncType) -> ?Error("Finding AuthConfig : ~p ñ~p", [E,S]), {error, E} end, - ?Info("get_access_token AuthConfig: ~p",[AuthConfig]), + %?Info("get_access_token AuthConfig: ~p",[AuthConfig]), #{token_url := TUrl, client_id := ClientId, redirect_uri := RedirectURI, client_secret := Secret, grant_type := GrantType, scope := Scope} = AuthConfig, @@ -50,21 +54,27 @@ get_access_token(AccountId, TokenPrefix, Code, SyncType) -> set_token_info(AccountId, TokenPrefix, TokenInfo, SyncType), ok; {ok, {{_, Code, _}, _, Error}} -> - ?Error("Fetching access token : ~p:~p", [Code, Error]), {error, Error}; {error, Error} -> ?Error("Fetching access token : ~p", [Error]), {error, Error} end. +%% refresh access token from web service using the configuration from callback module +%% store it in the avatar table of AccountId under the key TokenPrefix || "#token#" +-spec refresh_access_token(ddEntityId(), list(), module()) -> {ok, binary()} | {error, term()}. 
refresh_access_token(AccountId, TokenPrefix, SyncType) -> - #{token_url := TUrl, client_id := ClientId, scope := Scope, - client_secret := Secret} = SyncType:get_auth_config(), - #{<<"refresh_token">> := RefreshToken} = get_token_info(AccountId, TokenPrefix, SyncType), - Body = dperl_dal:url_enc_params( - #{"client_id" => ClientId, "client_secret" => {enc, Secret}, "scope" => {enc, Scope}, - "refresh_token" => RefreshToken, "grant_type" => "refresh_token"}), + #{token_url:=TUrl, client_id:=ClientId, scope:=Scope, client_secret:=Secret} + = SyncType:get_auth_config(), + %?Info("refresh_access_token ~p ~p ~p",[AccountId, TokenPrefix, SyncType]), + #{<<"refresh_token">>:=RefreshToken} = get_token_info(AccountId, TokenPrefix, SyncType), + Body = dperl_dal:url_enc_params(#{ "client_id"=>ClientId, "client_secret"=>{enc, Secret} + , "scope"=>{enc, Scope}, "refresh_token"=>RefreshToken + , "grant_type"=>"refresh_token"}), ContentType = "application/x-www-form-urlencoded", + %?Info("refresh_access_token TUrl=~p",[TUrl]), + %?Info("refresh_access_token ContentType=~p",[ContentType]), + %?Info("refresh_access_token Body=~p",[Body]), case httpc:request(post, {TUrl, "", ContentType, Body}, [], []) of {ok, {{_, 200, "OK"}, _, TokenBody}} -> TokenInfo = imem_json:decode(list_to_binary(TokenBody), [return_maps]), diff --git a/src/dperl/dperl.hrl b/src/dperl/dperl.hrl index 1733c417..c3f950e3 100644 --- a/src/dperl/dperl.hrl +++ b/src/dperl/dperl.hrl @@ -6,7 +6,7 @@ -include("../dderl.hrl"). -type jobName() :: binary(). --type jobModule() :: atom(). +-type jobModule() :: module(). -type jobArgs() :: map(). -type jobEnabled() :: true|false. -type jobRunning() :: true|false|undefined. diff --git a/src/dperl/dperl_dal.erl b/src/dperl/dperl_dal.erl index 99271a22..e6a0d791 100644 --- a/src/dperl/dperl_dal.erl +++ b/src/dperl/dperl_dal.erl @@ -2,23 +2,73 @@ -include("dperl.hrl"). 
--export([select/2, subscribe/1, unsubscribe/1, write/2, check_table/5, - sql_jp_bind/1, sql_bind_jp_values/2, read_channel/2, url_enc_params/1, - io_to_oci_datetime/1, create_check_channel/1, write_channel/3, - read_check_write/4, read_audit_keys/3, read_audit/3, get_enabled/1, - update_job_dyn/2, update_job_dyn/3, job_error_close/1, to_binary/1, - count_sibling_jobs/2, create_check_channel/2, write_protected/6, - get_last_state/1, get_last_audit_time/1, connect_imem_link/4, - remove_from_channel/2, remove_deep/2, data_nodes/0, job_state/1, - job_error/3, job_error/4, job_error_close/0, update_service_dyn/4, - update_service_dyn/3, all_keys/1, read_gt/3, time_str/1, ts_str/1, - safe_json_map/1, read_gt_lt/4, normalize_map/1, read_keys_gt/3, - write_if_different/3, maps_diff/2, read_keys_gt_lt/4, oci_opts/2, - oci_fetch_rows/2, key_from_json/1, disable/1, set_running/2, - read_siblings/2, read_channel_raw/2, worker_error/4, sort_links/1, - get_pool_name/1, remote_dal/3, get_pool_name/2, run_oci_stmt/3, - activity_logger/3, create_check_index/2, to_atom/1, report_status/7, - key_to_json/1, key_to_json_enc/1, create_or_replace_tigger/2]). 
+-export([activity_logger/3 + ,all_keys/1 + ,check_table/5 + ,connect_imem_link/4 + ,count_sibling_jobs/2 + ,create_check_channel/1 + ,create_check_channel/2 + ,create_check_index/2 + ,create_or_replace_tigger/2 + ,data_nodes/0 + ,disable/1 + ,get_enabled/1 + ,get_last_audit_time/1 + ,get_last_state/1 + ,get_pool_name/1 + ,get_pool_name/2 + ,io_to_oci_datetime/1 + ,job_error/3 + ,job_error/4 + ,job_error_close/0 + ,job_error_close/1 + ,job_state/1 + ,key_from_json/1 + ,key_to_json/1 + ,key_to_json_enc/1 + ,maps_diff/2 + ,normalize_map/1 + ,oci_fetch_rows/2 + ,oci_opts/2 + ,read_audit/3 + ,read_audit_keys/3 + ,read_channel/2 + ,read_channel_raw/2 + ,read_check_write/4 + ,read_gt/3 + ,read_gt_lt/4 + ,read_keys_gt/3 + ,read_keys_gt_lt/4 + ,read_siblings/2 + ,remote_dal/3 + ,remove_deep/2 + ,remove_from_channel/2 + ,report_status/7 + ,run_oci_stmt/3 + ,safe_json_map/1 + ,select/2 + ,set_running/2 + ,sort_links/1 + ,sql_bind_jp_values/2 + ,sql_jp_bind/1 + ,subscribe/1 + ,time_str/1 + ,to_atom/1 + ,to_binary/1 + ,ts_str/1 + ,unsubscribe/1 + ,update_job_dyn/2 + ,update_job_dyn/3 + ,update_service_dyn/3 + ,update_service_dyn/4 + ,url_enc_params/1 + ,worker_error/4 + ,write/2 + ,write_channel/3 + ,write_if_different/3 + ,write_protected/6 + ]). check_table(Table, ColumnNames, ColumnTypes, DefaultRecord, Opts) -> case catch imem_meta:create_check_table( @@ -26,8 +76,9 @@ check_table(Table, ColumnNames, ColumnTypes, DefaultRecord, Opts) -> Opts, system) of {'EXIT', {'ClientError', _} = Reason} -> ?Error("create_check table ~p, ~p", [Table, Reason]); - Else -> - ?Info("create_check table ~p... ~p", [Table, Else]) + _Else -> + %?Info("create_check table ~p... ~p", [Table, _Else]), + ok end. -spec create_check_channel(binary() | list()) -> ok | no_return(). 
diff --git a/src/dperl/dperl_strategy_scr.erl b/src/dperl/dperl_strategy_scr.erl index 10dcca99..857037aa 100644 --- a/src/dperl/dperl_strategy_scr.erl +++ b/src/dperl/dperl_strategy_scr.erl @@ -1,5 +1,7 @@ -module(dperl_strategy_scr). +%% implements the scr behaviour for sync / cleanup / refresh / cleanupCumRefresh (c/r) jobs + -include("dperl.hrl"). -include("dperl_strategy_scr.hrl"). @@ -18,8 +20,13 @@ % get a key list (limited in length) of recent changes on source side % often achieved by scanning an audit log after last checked timestamp (from state) % basis for sync cycle (differential change provisioning) +% for normal cleanup, a list of (possibly) dirty keys is returned +% for c/r jobs, a list of (possibly) dirty KV pairs is returned -callback get_source_events(scrState(), scrBatchSize()) -> - {error, scrAnyKey()} | {ok, scrAnyKeys(), scrState()} | {ok, sync_complete, scrState()}. + {ok, scrAnyKeys(), scrState()} | + {ok, scrAnyKeyVals(), scrState()} | + {ok, sync_complete, scrState()} | + {error, scrAnyKey()}. % check if destination data is currently acessible -callback connect_check_dst(scrState()) -> @@ -42,7 +49,8 @@ % update one item in destination (which is assumed to exist) % if the callback is able to do an insert instead of an update it can do it responsibly. --callback update_dst(scrAnyKey(), scrAnyVal(), scrState()) -> {scrSoftError(), scrState()}. +% ATTENTION: for c/r jobs the first parameter may be a KV Pair from which only the key is used +-callback update_dst(scrAnyKey()|scrAnyKeyVal(), scrAnyVal(), scrState()) -> {scrSoftError(), scrState()}. 
% allow the callback implementation act upon an error or warning message % result is ignored, used for debugging only @@ -73,7 +81,7 @@ % override for destination channel insert/update/delete (final data change) % only used for protected configurations (reversing the direction of data flow) --callback update_channel( scrAnyKey(), +-callback update_channel( scrAnyKey() | scrAnyKeyVal(), IsSamePlatform::boolean(), SourceVal::scrAnyVal(), DestVal::scrAnyVal(), @@ -93,35 +101,35 @@ % execute simple cleanup for next batch of keys -callback do_cleanup( scrState(), CleanupBulkCount::scrBatchSize()) -> - {{ok, scrState()} | {ok, finish, scrState() | error, any()}}. + {ok, scrState()} | {ok, finish, scrState()} | {error, term(), scrState()} | {error, term()}. % execute cleanup for found differences (Deletes and Inserts) --callback do_cleanup( Deletes::scrAnyKeys(), - Inserts::scrAnyKeys(), +-callback do_cleanup( Deletes::scrAnyKeys() | scrAnyKeyVal(), + Inserts::scrAnyKeys() | scrAnyKeyVal(), SearchingMatch::boolean(), % NextLastKey == MinKey scrState()) -> - {{ok, scrState()} | {ok, finish, scrState() | error, any()}}. + {ok, scrState()} | {ok, finish, scrState()} | {error, term(), scrState()} | {error, term()}. % execute cleanup/refresh for found differences (Deletes, Inserts and value Diffs) --callback do_cleanup( Deletes::scrAnyKeys(), - Inserts::scrAnyKeys(), - Diffs::scrAnyKeys(), +-callback do_cleanup( Deletes::scrAnyKeys() | scrAnyKeyVal(), + Inserts::scrAnyKeys() | scrAnyKeyVal(), + Diffs::scrAnyKeys() | scrAnyKeyVal(), SearchingMatch::boolean(), % NextLastKey == MinKey scrState()) -> - {{ok, scrState()} | {ok, finish, scrState() | error, any()}}. + {ok, scrState()} | {ok, finish, scrState()} | {error, term(), scrState()} | {error, term()}. 
% bulk load one batch of keys (for cleanup) or kv-pairs (cleanup/refresh) from source % a callback module implementing this and load_dst_after_key signals that it wants % to fully control the cleanup/refresh procedure --callback load_src_after_key( LastKeySeen::scrAnyKey(), +-callback load_src_after_key( LastKeySeen::scrAnyKey() | scrAnyKeyVal(), scrBatchSize(), scrState()) -> - {ok, scrAnyKeys(), scrState()} | {ok, scrAnyKeyVals(), scrState()} | {error, any(), scrState()}. + {ok, scrAnyKeys(), scrState()} | {ok, scrAnyKeyVals(), scrState()} | {error, term(), scrState()}. % bulk load one batch of keys (for cleanup) or kv-pairs (cleanup/refresh) from destination % a callback module implementing this and load_src_after_key signals that it wants % to fully control the cleanup/refresh procedure --callback load_dst_after_key( LastKeySeen::scrAnyKey(), +-callback load_dst_after_key( LastKeySeen::scrAnyKey() | scrAnyKeyVal(), scrBatchSize(), scrState()) -> {ok, scrAnyKeys(), scrState()} | {ok, scrAnyKeyVals(), scrState()} | {error, any(), scrState()}. @@ -134,17 +142,17 @@ ]). % chunked cleanup context --record(cleanup_ctx,{ srcKeys :: scrAnyKeys() +-record(cleanup_ctx,{ srcKeys :: scrAnyKeys()| scrAnyKeyVal() % ????? , srcCount :: integer() - , dstKeys :: scrAnyKeys() + , dstKeys :: scrAnyKeys()| scrAnyKeyVal() % ????? , dstCount :: integer() , bulkCount :: scrBatchSize() - , minKey :: scrAnyKey() - , maxKey :: scrAnyKey() - , lastKey :: scrAnyKey() - , deletes = [] :: scrAnyKeys() - , inserts = [] :: scrAnyKeys() - , differences = [] :: scrAnyKeys() + , minKey :: scrAnyKey() | scrAnyKeyVal() % ????? + , maxKey :: scrAnyKey() | scrAnyKeyVal() % ????? + , lastKey :: scrAnyKey() | scrAnyKeyVal() % ????? + , deletes = [] :: scrAnyKeys() | scrAnyKeyVals() + , inserts = [] :: scrAnyKeys() | scrAnyKeyVals() + , differences = [] :: scrAnyKeys() | scrAnyKeyVals() }). 
% Debug macros @@ -195,6 +203,8 @@ execute(Mod, Job, State, #{sync:=_, cleanup:=_, refresh:=_} = Args) when is_map( execute(Mod, Job, State, Args) when is_map(Args) -> execute(Mod, Job, State, maps:merge(#{sync=>true, cleanup=>true, refresh=>true}, Args)). +%% execute one provisioning cycle (round) which can mean one sync, cleanup, refresh or a c/r cycle +%% with priority in this order -spec execute(scrPhase(), jobModule(), jobName(), scrState(), jobArgs()) -> scrState() | no_return(). execute(sync, Mod, Job, State, #{sync:=Sync} = Args) -> % perform a sync cycle @@ -580,7 +590,6 @@ should_refresh(LastAttempt, LastSuccess, BatchInterval, Interval, Hours) -> end end. - -spec is_equal(scrAnyKey(), scrAnyVal(), scrAnyVal(), scrState()) -> boolean(). is_equal(_Key, S, S, _State) -> true; is_equal(_Key, S, D, _State) when is_map(S), is_map(D) -> @@ -602,8 +611,8 @@ sync_log(Msg, {Key, _}, ShouldLog) -> sync_log(Msg, Key, ShouldLog); sync_log(Msg, Key, _) when is_binary(Key) -> ?JInfo("~s : ~s", [Msg, Key]); sync_log(Msg, Key, _) -> ?JInfo("~s : ~p", [Msg, Key]). -%% chunked cleanup - +%% chunked cleanup, process possibly dirty keys (change events/differences) in the list +%% for c/r (where events consist of kv tuples) this must be done in callback module ???? -spec process_events(scrAnyKeys(), jobModule(), scrState()) -> {boolean(), scrState()}. process_events(Keys, Mod, State) -> ShouldLog = case erlang:function_exported(Mod, should_sync_log, 1) of diff --git a/src/dperl/jobs/dpjob_office_365.erl b/src/dperl/jobs/dpjob_office_365.erl index b6eff30a..ea47af89 100644 --- a/src/dperl/jobs/dpjob_office_365.erl +++ b/src/dperl/jobs/dpjob_office_365.erl @@ -9,11 +9,11 @@ -type locKey() :: [string()]. % local key of a contact, e.g. ["contact","My","Ah2hA77a"] -type locId() :: string(). % last item in locKey() is called local id e.g. "Ah2hA77a" -type locVal() :: map(). % local cvalue of a contact, converted to a map for processing --type locBin() :: binary(). 
% local cvalue of a contact in binary form (often stored like that) +%-type locBin() :: binary(). % local cvalue of a contact in binary form (often stored like that) -type remKey() :: binary(). % remote key of a contact, called <<"id">> in Office365 -type remKeys():: [remKey()]. % list of remKey() type (e.g. DirtyKeys) -type remVal() :: map(). % remote value of a contact (relevant fields only) --type remBin() :: binary(). % remote value of a contract in raw binary JSON form +%-type remBin() :: binary(). % remote value of a contract in raw binary JSON form -type meta() :: map(). % contact meta information with respect to this remote cloud @@ -386,8 +386,8 @@ get_status(#state{}) -> #{}. init_state(_) -> #state{}. -init({#dperlJob{name=Name, srcArgs=#{apiUrl:=ApiUrl}, args=Args, - dstArgs=#{channel:=Channel} = DstArgs}, State}) -> +init({#dperlJob{ name=Name, srcArgs=#{apiUrl:=ApiUrl}, args=Args + , dstArgs=#{channel:=Channel} = DstArgs}, State}) -> case dperl_auth_cache:get_enc_hash(Name) of undefined -> ?JError("Encryption hash is not avaialable"), @@ -396,21 +396,21 @@ init({#dperlJob{name=Name, srcArgs=#{apiUrl:=ApiUrl}, args=Args, ?JInfo("Starting with ~p's enchash...", [AccountId]), imem_enc_mnesia:put_enc_hash(EncHash), KeyPrefix = maps:get(keyPrefix, DstArgs, get_key_prefix(Name)), - TokenPrefix = get_auth_token_key_prefix(Name), + TokenPrefix = maps:get(tokenPrefix, Args, get_auth_token_key_prefix(Name)), + Type = maps:get(type, Args, pull), + ChannelBin = dperl_dal:to_binary(Channel), + dperl_dal:create_check_channel(ChannelBin), + ContactIff = <<"fun() ->imem_index:gen_iff_binterm_list_pattern([\"contact\",'_','_']) end.">>, + PLContact = [{':',<<"id">>, {'#', <<"values">>, {':', <<"META">>, <<"cvalue">>}}}], + IdxContact = #ddIdxDef{ id = ?CONTACT_INDEXID + , name = <<"idx_contact">> + , type = iv_k + , pl = PLContact + , vnf = <<"fun imem_index:vnf_identity/1.">> + , iff = ContactIff}, + dperl_dal:create_check_index(ChannelBin, [IdxContact]), case 
dderl_oauth:get_token_info(AccountId, TokenPrefix, ?SYNC_OFFICE365) of - #{<<"access_token">> := Token} -> - Type = maps:get(type, Args, pull), - ChannelBin = dperl_dal:to_binary(Channel), - dperl_dal:create_check_channel(ChannelBin), - ContactIff = <<"fun() ->imem_index:gen_iff_binterm_list_pattern([\"contact\",'_','_']) end.">>, - PLContact = [{':',<<"id">>, {'#', <<"values">>, {':', <<"META">>, <<"cvalue">>}}}], - IdxContact = #ddIdxDef{id = ?CONTACT_INDEXID - ,name = <<"idx_contact">> - ,type = iv_k - ,pl = PLContact - ,vnf = <<"fun imem_index:vnf_identity/1.">> - ,iff = ContactIff}, - dperl_dal:create_check_index(ChannelBin, [IdxContact]), + #{<<"access_token">>:=Token} -> {ok, State#state{ name=Name, type=Type, channel=ChannelBin, keyPrefix=KeyPrefix , apiUrl=ApiUrl, tokenPrefix=TokenPrefix , token=Token, accountId = AccountId diff --git a/src/dperl/jobs/dpjob_ouraring_crawl.erl b/src/dperl/jobs/dpjob_ouraring_crawl.erl index 3fdbd7ed..5d8f7233 100644 --- a/src/dperl/jobs/dpjob_ouraring_crawl.erl +++ b/src/dperl/jobs/dpjob_ouraring_crawl.erl @@ -1,11 +1,35 @@ -module(dpjob_ouraring_crawl). +%% implements scr puller callback for OuraRing cloud data as a cleanupCumRefresh job (c/r) +%% avoids pulling incomplete intra-day data by not pulling beyond yesterday's data +%% the default "OuraRing" identifier can be altered to camouflage the nature of the data +%% this also supports multiple OuraRing pullers into the same table, if needed + -include("../dperl.hrl"). +-include("../dperl_strategy_scr.hrl"). -behavior(dperl_worker). -behavior(dperl_strategy_scr). --define(AUTH_CONFIG(__JOB_NAME), +-type metric() :: string(). % "userinfo" | "sleep" | "readiness" | "activity". +-type locKey() :: [string()]. % local key of a metric, e.g. ["healthDevice","OuraRing","sleep"] +-type locVal() :: map(). % local cvalue of a metric, converted to a map for processing +-type locKVP() :: {locKey(), locVal()}. % local key value pair +-type year() :: non_neg_integer(). 
+-type month() :: 1..12. +-type day() :: 1..31. +-type date() :: {year(), month(), day()}. +-type stringDate() :: string(). % "YYYY-MM-DD" +-type dayDate() :: date() | stringDate(). +-type maybeDate() :: undefined | date(). +-type status() :: #{ lastSleepDay := maybeDate() + , lastActivityDay := maybeDate() + , lastReadinessDay := maybeDate()}. % relevant cleanup status +-type token() :: binary(). % OAuth access token (refreshed after 'unauthorized') + +-define(METRICS, ["userinfo", "activity", "readiness", "sleep"]). + +-define(OAUTH2_CONFIG(__JOB_NAME), ?GET_CONFIG(oAuth2Config,[__JOB_NAME], #{auth_url =>"https://cloud.ouraring.com/oauth/authorize?response_type=code", client_id => "12345", redirect_uri => "https://localhost:8443/dderl/", @@ -32,17 +56,19 @@ "Days to be shifted backwards for starting the job") ). --record(state, { name - , channel - , is_connected = true - , access_token - , api_url - , last_sleep_day - , last_activity_day - , last_readiness_day - , infos = [] - , key_prefix - , accountId +-record(state, { name :: jobName() + , type = pull :: scrDirection() % constant here + , channel :: scrChannel() % channel name + , keyPrefix :: locKey() % key space prefix in channel + , tokenPrefix :: locKey() % without id #token# + , token :: token() % token binary string + , apiUrl :: string() + , isConnected = true :: boolean() % provokes unauthorized in first cycle + , lastSleepDay :: maybeDate() + , lastActivityDay :: maybeDate() + , lastReadinessDay :: maybeDate() + , cycleBuffer = [] :: [locKVP()] % dirty buffer for one c/r cycle + , accountId :: ddEntityId() }). % dperl_worker exports @@ -78,35 +104,46 @@ , report_status/3 ]). -get_auth_config() -> ?AUTH_CONFIG(<<>>). +-spec get_auth_config() -> map(). +get_auth_config() -> ?OAUTH2_CONFIG(<<>>). + +-spec get_auth_config(jobName()) -> map(). +get_auth_config(JobName) -> ?OAUTH2_CONFIG(JobName). -get_auth_config(JobName) -> ?AUTH_CONFIG(JobName). +-spec get_auth_token_key_prefix() -> locKey(). 
+get_auth_token_key_prefix() -> ?OAUTH2_TOKEN_KEY_PREFIX(<<>>). +-spec get_auth_token_key_prefix(jobName()) -> locKey(). +get_auth_token_key_prefix(JobName) -> ?OAUTH2_TOKEN_KEY_PREFIX(JobName). + +-spec get_key_prefix() -> locKey(). get_key_prefix() -> ?KEY_PREFIX(<<>>). +-spec get_key_prefix(jobName()) -> locKey(). get_key_prefix(JobName) -> ?KEY_PREFIX(JobName). -get_auth_token_key_prefix() -> ?OAUTH2_TOKEN_KEY_PREFIX(<<>>). -get_auth_token_key_prefix(JobName) -> ?OAUTH2_TOKEN_KEY_PREFIX(JobName). - -connect_check_src(#state{is_connected = true} = State) -> +-spec connect_check_src(#state{}) -> {ok, #state{}} | {error, term(), #state{}}. +connect_check_src(#state{isConnected = true} = State) -> {ok, State}; -connect_check_src(#state{is_connected=false, accountId=AccountId, key_prefix=KeyPrefix} = State) -> +connect_check_src(#state{isConnected=false, accountId=AccountId, tokenPrefix=TokenPrefix} = State) -> ?JTrace("Refreshing access token"), - case dderl_oauth:refresh_access_token(AccountId, KeyPrefix, ?SYNC_OURARING) of - {ok, AccessToken} -> - ?Info("new access token fetched"), - {ok, State#state{access_token=AccessToken, is_connected=true}}; + case dderl_oauth:refresh_access_token(AccountId, TokenPrefix, ?SYNC_OURARING) of + {ok, Token} -> + %?Info("new access token fetched"), + {ok, State#state{token=Token, isConnected=true}}; {error, Error} -> ?JError("Unexpected response : ~p", [Error]), {error, Error, State} end. -get_source_events(#state{infos = []} = State, _BulkSize) -> +%% get CycleBuffer (dirty kv-pairs as detected in this c/r cycle) +-spec get_source_events(#state{}, scrBatchSize()) -> + {ok, [{locKey(),locVal()}], #state{}} | {ok, sync_complete, #state{}}. +get_source_events(#state{cycleBuffer=[]} = State, _BulkSize) -> {ok, sync_complete, State}; -get_source_events(#state{infos = Infos} = State, _BulkSize) -> - {ok, Infos, State#state{infos = []}}. 
+get_source_events(#state{cycleBuffer=CycleBuffer} = State, _BulkSize) -> + {ok, CycleBuffer, State#state{cycleBuffer=[]}}. connect_check_dst(State) -> {ok, State}. @@ -114,59 +151,71 @@ do_refresh(_State, _BulkSize) -> {error, cleanup_only}. fetch_src({_Key, Value}, _State) -> Value. -fetch_dst({Key, _}, State) -> - dperl_dal:read_channel(State#state.channel, Key). +-spec fetch_dst(locKey(), #state{}) -> ?NOT_FOUND | locVal(). +fetch_dst({Key, _}, #state{channel=Channel}) -> + dperl_dal:read_channel(Channel, Key). +-spec insert_dst(locKey(), locVal(), #state{}) -> {scrSoftError(), #state{}}. insert_dst(Key, Val, State) -> update_dst(Key, Val, State). report_status(_Key, _Status, _State) -> no_op. -do_cleanup(State, _BlkCount) -> - Types = ["sleep", "activity", "readiness", "userinfo"], - case fetch_metrics(Types, State) of +-spec do_cleanup(#state{}, scrBatchSize()) -> + {ok, #state{}} | {ok, finish, #state{}} | {error, term(), #state{}}. +do_cleanup(#state{cycleBuffer=CycleBuffer} = State, _BlkCount) -> + case fetch_metrics(?METRICS, State) of {ok, State2} -> - case State2#state.infos of - [_] -> + case CycleBuffer of + [_] -> % only userinfo remains. we are done {ok, finish, State2}; - _ -> + _ -> % other items in the list, continue {ok, State2} end; {error, Error} -> - {error, Error, State#state{is_connected = false}} + {error, Error, State#state{isConnected=false}} end. +-spec delete_dst(locKey(), #state{}) -> {scrSoftError(), #state{}}. delete_dst(Key, #state{channel = Channel} = State) -> ?JInfo("Deleting : ~p", [Key]), dperl_dal:remove_from_channel(Channel, Key), {false, State}. +-spec update_dst(locKey() | locKVP(), locVal(), #state{}) -> {scrSoftError(), #state{}}. 
update_dst({Key, _}, Val, State) -> update_dst(Key, Val, State); -update_dst(Key, Val, #state{channel = Channel} = State) when is_binary(Val) -> +update_dst(Key, Val, #state{channel=Channel} = State) when is_binary(Val) -> dperl_dal:write_channel(Channel, Key, Val), {false, State}; update_dst(Key, Val, State) -> update_dst(Key, imem_json:encode(Val), State). -get_status(#state{last_sleep_day = LastSleepDay, - last_activity_day = LastActivityDay, - last_readiness_day = LastReadinessDay}) -> - #{lastSleepDay => LastSleepDay, lastActivityDay => LastActivityDay, - lastReadinessDay => LastReadinessDay}. +-spec get_status(#state{}) -> status(). +get_status(#state{ lastSleepDay=LastSleepDay, lastActivityDay=LastActivityDay + , lastReadinessDay=LastReadinessDay}) -> + #{ lastSleepDay=>LastSleepDay + , lastActivityDay=>LastActivityDay + , lastReadinessDay=>LastReadinessDay}. +%% (partially) initialize job state from status info in first matching +%% jobDyn table (from all available nodes) +-spec init_state([#dperlNodeJobDyn{}]) -> #state{}. init_state([]) -> #state{}; init_state([#dperlNodeJobDyn{state = State} | _]) -> LastSleepDay = maps:get(lastSleepDay, State, undefined), LastActivityDay = maps:get(lastActivityDay, State, undefined), LastReadinessDay = maps:get(lastReadinessDay, State, undefined), - #state{last_sleep_day = LastSleepDay, last_activity_day = LastActivityDay, - last_readiness_day = LastReadinessDay}; -init_state([_ | Others]) -> - init_state(Others). - -init({#dperlJob{name=Name, dstArgs = #{channel := Channel} = DstArgs, - srcArgs = #{api_url := ApiUrl}}, State}) -> + #state{ lastSleepDay=LastSleepDay + , lastActivityDay=LastActivityDay + , lastReadinessDay = LastReadinessDay}; +init_state([_|Others]) -> init_state(Others). + +%% fully initialize a job using the job config and the partially filled +%% state record (see init_state) derived from nodeJobDyn entries +-spec init({#dperlJob{}, #state{}}) -> {ok, #state{}} | {stop, badarg}. 
+init({#dperlJob{ name=Name, dstArgs=#{channel:=Channel} = DstArgs, args=Args + , srcArgs=#{apiUrl:=ApiUrl}}, State}) -> case dperl_auth_cache:get_enc_hash(Name) of undefined -> ?JError("Encryption hash is not avaialable"), @@ -174,13 +223,15 @@ init({#dperlJob{name=Name, dstArgs = #{channel := Channel} = DstArgs, {AccountId, EncHash} -> ?JInfo("Starting with ~p's enchash...", [AccountId]), imem_enc_mnesia:put_enc_hash(EncHash), - KeyPrefix = maps:get(key_prefix, DstArgs, get_key_prefix(Name)), - case dderl_oauth:get_token_info(AccountId, get_auth_token_key_prefix(Name), ?SYNC_OURARING) of - #{<<"access_token">> := AccessToken} -> - ChannelBin = dperl_dal:to_binary(Channel), - dperl_dal:create_check_channel(ChannelBin), - {ok, State#state{channel=ChannelBin, api_url=ApiUrl, accountId=AccountId, - key_prefix=KeyPrefix, access_token=AccessToken}}; + KeyPrefix = maps:get(keyPrefix, DstArgs, get_key_prefix(Name)), + TokenPrefix = maps:get(tokenPrefix, Args, get_auth_token_key_prefix(Name)), + ChannelBin = dperl_dal:to_binary(Channel), + dperl_dal:create_check_channel(ChannelBin), + case dderl_oauth:get_token_info(AccountId, TokenPrefix, ?SYNC_OURARING) of + #{<<"access_token">> := Token} -> + {ok, State#state{ name=Name, channel=ChannelBin, keyPrefix=KeyPrefix + , apiUrl=ApiUrl, tokenPrefix=TokenPrefix, token=Token + , accountId = AccountId}}; _ -> ?JError("Access token not found for KeyPrefix ~p",[KeyPrefix]), {stop, badarg} @@ -208,92 +259,105 @@ terminate(Reason, _State) -> %% private functions +%% perform one fetch round for all desired metrics +%% fetch one value per metric for its respective due date +%% skip to next metric, if metric is not available or if all values are fetched +%% aggregate kv into cycleBuffer in state to be processed/stored per round +-spec fetch_metrics([metric()], #state{}) -> {ok, #state{}} | {error, term()}. 
fetch_metrics([], State) -> {ok, State}; -fetch_metrics(["userinfo" | Types], State) -> +fetch_metrics(["userinfo"|Metrics], State) -> case fetch_userinfo(State) of - {error, Error} -> - {error, Error}; - State1 -> - fetch_metrics(Types, State1) + {error, Error} -> {error, Error}; + {ok, State1} -> fetch_metrics(Metrics, State1) end; -fetch_metrics([Type | Types], State) -> - case get_day(Type, State) of +fetch_metrics([Metric|Metrics], #state{cycleBuffer=CycleBuffer} = State) -> + case get_day(Metric, State) of fetched -> - fetch_metrics(Types, State); + fetch_metrics(Metrics, State); Day -> - case fetch_metric(Type, Day, State) of + case fetch_metric(Metric, Day, State) of {error, Error} -> {error, Error}; none -> - fetch_metrics(Types, State); - {ok, MDay, Metric} -> - State1 = set_metric_day(Type, MDay, State#state{infos = [Metric | State#state.infos]}), - fetch_metrics(Types, State1) + fetch_metrics(Metrics, State); + {ok, MDay, KVP} -> + State1 = set_metric_day(Metric, MDay, State#state{cycleBuffer=[KVP|CycleBuffer]}), + fetch_metrics(Metrics, State1) end end. -fetch_metric(Type, Day, #state{api_url = ApiUrl, access_token = AccessToken} = State) -> - ?JInfo("Fetching metric for ~s on ~p", [Type, Day]), +%% fetch metric value for Day from cloud service, if it exists +%% values may be missing for certain days in which case a 'start_day_query' is used which gives +%% the first data after the missing day +-spec fetch_metric(metric(), date(), #state{}) -> {ok, date(), locKVP()} | none | {error, term()}. 
+fetch_metric(Metric, Day, #state{keyPrefix=KeyPrefix, apiUrl=ApiUrl, token=Token} = State) -> + ?JInfo("Fetching metric for ~s on ~p", [Metric, Day]), NextDay = next_day(Day), - case fetch_metric(Type, day_query(Day), ApiUrl, AccessToken) of + case fetch_metric(Metric, day_query(Day), ApiUrl, Token) of none -> - case fetch_metric(Type, start_day_query(NextDay), ApiUrl, AccessToken) of - {ok, _} -> - fetch_metric(Type, NextDay, State); - _Other -> - none + case fetch_metric(Metric, start_day_query(NextDay), ApiUrl, Token) of + {ok, _} -> fetch_metric(Metric, NextDay, State); + _Other -> none end; - {ok, Metric} -> - Key = build_key(Type, State#state.key_prefix), - Info = {Key, Metric#{<<"_day">> => list_to_binary(edate:date_to_string(Day))}}, - case Type of - Type when Type == "sleep"; Type == "readiness" -> - {ok, Day, Info}; + {ok, Value} -> + Key = build_key(KeyPrefix, Metric), + KVP = {Key, Value#{<<"_day">> => list_to_binary(edate:date_to_string(Day))}}, + case Metric of + Metric when Metric=="sleep"; Metric=="readiness" -> + {ok, Day, KVP}; "activity" -> - % fetching activity only if next days data exists - case fetch_metric(Type, start_day_query(NextDay), ApiUrl, AccessToken) of - {ok, _} -> - {ok, Day, Info}; - Other -> - Other + % fetching activity only if next days activity data (partially) exists + case fetch_metric(Metric, start_day_query(NextDay), ApiUrl, Token) of + {ok, _} -> {ok, Day, KVP}; + none -> none; + {error, Error} -> {error, Error} end end; {error, Error} -> - ?JError("Error fetching ~s for ~p : ~p", [Type, Day, Error]), + ?JError("Error fetching ~s for ~p : ~p", [Metric, Day, Error]), {error, Error} end. 
-fetch_metric(Type, DayQuery, ApiUrl, AccessToken) -> - Url = ApiUrl ++ Type ++ DayQuery, - TypeBin = list_to_binary(Type), - case exec_req(Url, AccessToken) of - #{TypeBin := []} -> - none; - Metric when is_map(Metric) -> - {ok, Metric}; - {error, Error} -> - {error, Error} +%% fetch metric data from Oura cloud by metric and date condition (given as url parameter string) +-spec fetch_metric(metric(), string(), string(), token()) -> {ok, locVal()} | none | {error, term()}. +fetch_metric(Metric, DayQuery, ApiUrl, Token) -> + Url = ApiUrl ++ Metric ++ DayQuery, + MetricBin = list_to_binary(Metric), + case exec_req(Url, Token) of + #{MetricBin:=[]} -> none; + Value when is_map(Value) -> {ok, Value}; + {error, Error} -> {error, Error} end. -fetch_userinfo(#state{api_url = ApiUrl, access_token = AccessToken} = State) -> - case exec_req(ApiUrl ++ "userinfo", AccessToken) of +%% fetch userinfo from Oura cloud and add it to the CycleBuffer +%% userinfo comes as latest value only, no day history available +-spec fetch_userinfo(#state{}) ->{ok, #state{}} | {error, term()}. +fetch_userinfo(#state{keyPrefix=KeyPrefix, apiUrl=ApiUrl, token=Token, cycleBuffer=CycleBuffer} = State) -> + case exec_req(ApiUrl ++ "userinfo", Token) of UserInfo when is_map(UserInfo) -> - Info = {build_key("userinfo", State#state.key_prefix), UserInfo}, - State#state{infos = [Info | State#state.infos]}; + KVP = {build_key(KeyPrefix, "userinfo"), UserInfo}, + {ok, State#state{cycleBuffer=[KVP|CycleBuffer]}}; {error, Error} -> ?JError("Error fetching userinfo : ~p", [Error]), {error, Error} end. 
-get_day(Type, State) -> - LastDay = get_last_day(Type, State), - Key = build_key(Type, State#state.key_prefix), +%% evaluate next day to fetch or 'fetched' if done +%% increment the _day value for the current metric but not beyond yesterday +%% if current metric does not exist in avatar, it may need to be initialized +%% in the past as configured in ?SHIFT_DAYS, else use last day previously fetched +%% day for this metric according to jobDyn state and increment by one day +%% re-fetch yesterday if not there +-spec get_day(metric(), #state{}) -> date() | fetched. +get_day(Metric, #state{name=Name, keyPrefix=KeyPrefix, channel=Channel} = State) -> + LastDay = get_last_day(Metric, State), + Key = build_key(KeyPrefix, Metric), Yesterday = edate:yesterday(), - case dperl_dal:read_channel(State#state.channel, Key) of + case dperl_dal:read_channel(Channel, Key) of ?NOT_FOUND -> case LastDay of undefined -> - SDays = ?SHIFT_DAYS(State#state.name), + SDays = ?SHIFT_DAYS(Name), edate:shift(-1 * SDays, days); Yesterday -> Yesterday; @@ -309,8 +373,10 @@ get_day(Type, State) -> end end. -exec_req(Url, AccessToken) -> - AuthHeader = [{"Authorization", "Bearer " ++ binary_to_list(AccessToken)}], +%% request a map response from web service (local value to be stored in avatar) +-spec exec_req(string(),token()) -> locVal() | {error, term()}. +exec_req(Url, Token) -> + AuthHeader = [{"Authorization", "Bearer " ++ binary_to_list(Token)}], case httpc:request(get, {Url, AuthHeader}, [], []) of {ok, {{_, 200, "OK"}, _, Result}} -> imem_json:decode(list_to_binary(Result), [return_maps]); @@ -321,29 +387,28 @@ exec_req(Url, AccessToken) -> {error, Error} end. -next_day(Day) when is_list(Day) -> - next_day(edate:string_to_date(Day)); -next_day(Day) when is_tuple(Day) -> - edate:shift(Day, 1, day). +-spec next_day(dayDate()) -> date(). +next_day(Day) when is_list(Day) -> (edate:string_to_date(Day)); +next_day(Day) when is_tuple(Day) -> edate:shift(Day, 1, day). 
-day_query(Day) when is_tuple(Day) -> - day_query(edate:date_to_string(Day)); -day_query(Day) when is_list(Day) -> - "?start=" ++ Day ++ "&end=" ++ Day. +-spec day_query(dayDate()) -> string(). +day_query(Day) when is_tuple(Day) -> day_query(edate:date_to_string(Day)); +day_query(Day) when is_list(Day) -> "?start=" ++ Day ++ "&end=" ++ Day. -start_day_query(Day) when is_tuple(Day) -> - start_day_query(edate:date_to_string(Day)); -start_day_query(Day) when is_list(Day) -> - "?start=" ++ Day. +-spec start_day_query(dayDate()) -> string(). +start_day_query(Day) when is_tuple(Day) -> start_day_query(edate:date_to_string(Day)); +start_day_query(Day) when is_list(Day) -> "?start=" ++ Day. -get_last_day("sleep", #state{last_sleep_day = LastSleepDay}) -> LastSleepDay; -get_last_day("activity", #state{last_activity_day = LastActivityDay}) -> LastActivityDay; -get_last_day("readiness", #state{last_readiness_day = LastReadinessDay}) -> LastReadinessDay. +-spec get_last_day(metric(), #state{}) -> maybeDate(). +get_last_day("sleep", #state{lastSleepDay=LastSleepDay}) -> LastSleepDay; +get_last_day("activity", #state{lastActivityDay=LastActivityDay}) -> LastActivityDay; +get_last_day("readiness", #state{lastReadinessDay=LastReadinessDay}) -> LastReadinessDay. -set_metric_day("sleep", Day, State) -> State#state{last_sleep_day = Day}; -set_metric_day("activity", Day, State) -> State#state{last_activity_day = Day}; -set_metric_day("readiness", Day, State) -> State#state{last_readiness_day = Day}. +-spec set_metric_day(metric(), maybeDate(), #state{}) -> #state{}. +set_metric_day("sleep", Day, State) -> State#state{lastSleepDay=Day}; +set_metric_day("activity", Day, State) -> State#state{lastActivityDay=Day}; +set_metric_day("readiness", Day, State) -> State#state{lastReadinessDay=Day}. -build_key(Type, KeyPrefix) when is_list(Type), is_list(KeyPrefix)-> - KeyPrefix ++ [Type]. +-spec build_key(locKey(), metric()) -> locKey(). 
+build_key(KeyPrefix, Metric) when is_list(Metric), is_list(KeyPrefix) -> KeyPrefix ++ [Metric]. From 2f9d6997dccf0cc0c9ed7cd70073fe47afc4217d Mon Sep 17 00:00:00 2001 From: stoch Date: Mon, 29 Jun 2020 18:16:30 +0200 Subject: [PATCH 63/72] reworking Office365 puller --- src/dderl_oauth.erl | 25 +- src/dderl_session.erl | 2 +- src/dperl/dperl_dal.erl | 108 +++-- src/dperl/dperl_strategy_scr.erl | 402 +++++++++------- src/dperl/dperl_strategy_scr.hrl | 17 +- src/dperl/jobs/dpjob_office_365.erl | 598 ++++++++++++++---------- src/dperl/jobs/dpjob_ouraring_crawl.erl | 30 +- 7 files changed, 685 insertions(+), 497 deletions(-) diff --git a/src/dderl_oauth.erl b/src/dderl_oauth.erl index d9bef261..438ac01b 100644 --- a/src/dderl_oauth.erl +++ b/src/dderl_oauth.erl @@ -24,8 +24,8 @@ get_authorize_url(XSRFToken, AuthConfig, SyncType) -> State = #{xsrfToken => XSRFToken, type => SyncType}, #{auth_url:=Url, client_id:=ClientId, redirect_uri:=RedirectURI, scope:=Scope} = AuthConfig, UrlParams = dperl_dal:url_enc_params( - #{"client_id" => ClientId, "redirect_uri" => {enc, RedirectURI}, - "scope" => {enc, Scope}, "state" => {enc, imem_json:encode(State)}}), + #{"client_id" => ClientId, "redirect_uri" => {enc, RedirectURI} + ,"scope" => {enc, Scope}, "state" => {enc, imem_json:encode(State)}}), erlang:iolist_to_binary([Url, "&", UrlParams]). 
@@ -41,13 +41,13 @@ get_access_token(AccountId, TokenPrefix, Code, SyncType) -> {error, E} end, %?Info("get_access_token AuthConfig: ~p",[AuthConfig]), - #{token_url := TUrl, client_id := ClientId, redirect_uri := RedirectURI, - client_secret := Secret, grant_type := GrantType, - scope := Scope} = AuthConfig, + #{token_url:=TUrl, client_id:=ClientId, redirect_uri:=RedirectURI + ,client_secret:=Secret, grant_type:=GrantType + ,scope := Scope} = AuthConfig, Body = dperl_dal:url_enc_params( - #{"client_id" => ClientId, "scope" => {enc, Scope}, "code" => Code, - "redirect_uri" => {enc, RedirectURI}, "grant_type" => GrantType, - "client_secret" => {enc, Secret}}), + #{ "client_id" => ClientId, "scope" => {enc, Scope}, "code" => Code + , "redirect_uri" => {enc, RedirectURI}, "grant_type" => GrantType + , "client_secret" => {enc, Secret}}), ContentType = "application/x-www-form-urlencoded", case httpc:request(post, {TUrl, "", ContentType, Body}, [], []) of {ok, {{_, 200, "OK"}, _, TokenInfo}} -> @@ -66,15 +66,16 @@ get_access_token(AccountId, TokenPrefix, Code, SyncType) -> refresh_access_token(AccountId, TokenPrefix, SyncType) -> #{token_url:=TUrl, client_id:=ClientId, scope:=Scope, client_secret:=Secret} = SyncType:get_auth_config(), - %?Info("refresh_access_token ~p ~p ~p",[AccountId, TokenPrefix, SyncType]), + ?Info("refresh_access_token ~p ~p ~p",[AccountId, TokenPrefix, SyncType]), #{<<"refresh_token">>:=RefreshToken} = get_token_info(AccountId, TokenPrefix, SyncType), Body = dperl_dal:url_enc_params(#{ "client_id"=>ClientId, "client_secret"=>{enc, Secret} , "scope"=>{enc, Scope}, "refresh_token"=>RefreshToken , "grant_type"=>"refresh_token"}), ContentType = "application/x-www-form-urlencoded", - %?Info("refresh_access_token TUrl=~p",[TUrl]), - %?Info("refresh_access_token ContentType=~p",[ContentType]), - %?Info("refresh_access_token Body=~p",[Body]), + ?Info("refresh_access_token TUrl=~p",[TUrl]), + ?Info("refresh_access_token ContentType=~p",[ContentType]), + 
%?Info("refresh_access_token Body=~p",[Body]), + %?Info("refresh_access_token RefreshToken=~p",[RefreshToken]), case httpc:request(post, {TUrl, "", ContentType, Body}, [], []) of {ok, {{_, 200, "OK"}, _, TokenBody}} -> TokenInfo = imem_json:decode(list_to_binary(TokenBody), [return_maps]), diff --git a/src/dderl_session.erl b/src/dderl_session.erl index e07b87ca..2b2e0100 100644 --- a/src/dderl_session.erl +++ b/src/dderl_session.erl @@ -400,7 +400,7 @@ process_call({[<<"oauth2_callback">>], ReqData}, _Adapter, From, {SrcIp, _}, Sta #{<<"oauth2_callback">> := #{<<"code">> := Code, <<"state">> := #{<<"type">> := SyncType}}} = jsx:decode(ReqData, [return_maps]), ?Info("oauth2_callback SyncType: ~p Code: ~p",[SyncType, Code]), - % ToDo: Check if this data can this be trusted + % ToDo: Check if this data can be trusted {SyncHandler,TokenPrefix} = try SH = binary_to_existing_atom(SyncType,utf8), {SH,SH:get_auth_token_key_prefix()} % ToDo: may depend on JobName or TokenPrefix diff --git a/src/dperl/dperl_dal.erl b/src/dperl/dperl_dal.erl index e6a0d791..383c6dda 100644 --- a/src/dperl/dperl_dal.erl +++ b/src/dperl/dperl_dal.erl @@ -66,8 +66,12 @@ ,worker_error/4 ,write/2 ,write_channel/3 + ,write_channel_no_audit/3 ,write_if_different/3 ,write_protected/6 + ,read_channel_index/2 + ,read_channel_index_key/2 + ,read_channel_index_key_prefix/3 ]). check_table(Table, ColumnNames, ColumnTypes, DefaultRecord, Opts) -> @@ -115,6 +119,16 @@ write_channel(Channel, Key, Val) when is_map(Val); byte_size(Val) > 0 -> Other -> {error, Other} end. +-spec write_channel_no_audit(binary(), any(), any()) -> ok | {error, any()}. +write_channel_no_audit(Channel, Key, Val) when is_map(Val); byte_size(Val) > 0 -> + case catch imem_dal_skvh:write(system, Channel, Key, Val) of + Res when is_map(Res) -> ok; + {'EXIT', Error} -> {error, Error}; + {error, Error} -> {error, Error}; + Other -> {error, Other} + end. 
+ + write_if_different(Channel, Key, Val) -> case imem_dal_skvh:read(system, Channel, [Key]) of [#{cvalue := Val}] -> no_op; @@ -130,6 +144,16 @@ read_channel(Channel, Key) when is_binary(Channel) -> _ -> ?NOT_FOUND end. +read_channel_index(Channel, Stu) -> + imem_meta:read(imem_meta:index_table(Channel), Stu). + +read_channel_index_key(Channel, Stu) -> + [sext:decode(Row#ddIndex.lnk) || Row <- read_channel_index(Channel, Stu)]. + +read_channel_index_key_prefix(Channel, Stu, Prefix) -> + F = fun(X) -> lists:prefix(Prefix,X) end, + lists:filter(F, read_channel_index_key(Channel, Stu)). + read_channel_raw(Channel, Key) when is_list(Channel) -> read_channel_raw(list_to_binary(Channel), Key); read_channel_raw(Channel, Key) when is_binary(Channel) -> @@ -313,18 +337,16 @@ update_job_dyn(JobName, State) when is_binary(JobName) andalso is_map(State) -> imem_meta:transaction(fun() -> case imem_meta:read(?JOBDYN_TABLE, JobName) of [] -> - ok = imem_meta:write( - ?JOBDYN_TABLE, - #dperlNodeJobDyn{name = JobName, state = State, - status = synced, - statusTime = imem_meta:time_uid()}); + ok = imem_meta:write(?JOBDYN_TABLE, + #dperlNodeJobDyn{ name=JobName, state=State, status=synced + , statusTime = imem_meta:time_uid()}); [#dperlNodeJobDyn{state=OldState} = J] -> NewState = maps:merge(OldState, State), - if OldState /= NewState -> - ok = imem_meta:write( - ?JOBDYN_TABLE, - J#dperlNodeJobDyn{state = NewState, - statusTime = imem_meta:time_uid()}); + if + OldState /= NewState -> + ok = imem_meta:write(?JOBDYN_TABLE, + J#dperlNodeJobDyn{ state=NewState + , statusTime=imem_meta:time_uid()}); true -> ok end end end); @@ -335,45 +357,44 @@ update_job_dyn(JobName, Status) when is_binary(JobName) andalso Status == idle orelse Status == error orelse Status == stopped) -> case imem_meta:read(?JOBDYN_TABLE, JobName) of [] -> - ok = imem_meta:write( - ?JOBDYN_TABLE, - #dperlNodeJobDyn{name = JobName, state = #{}, - status = Status, - statusTime = imem_meta:time_uid()}); + ok = 
imem_meta:write(?JOBDYN_TABLE, + #dperlNodeJobDyn{ name=JobName, state=#{} + , status=Status + , statusTime=imem_meta:time_uid()}); [#dperlNodeJobDyn{status = OldStatus} = J] -> - if OldStatus /= Status orelse - (OldStatus == Status andalso Status == error) -> - ok = imem_meta:write( - ?JOBDYN_TABLE, - J#dperlNodeJobDyn{status = Status, - statusTime = imem_meta:time_uid()}); - true -> ok + if + OldStatus /= Status orelse + (OldStatus == Status andalso Status == error) -> + ok = imem_meta:write(?JOBDYN_TABLE, + J#dperlNodeJobDyn{ status=Status + , statusTime=imem_meta:time_uid()}); + true -> ok end end. -update_job_dyn(JobName, State, Status) when is_binary(JobName) andalso is_map(State) andalso +update_job_dyn(JobName, State, Status) + when is_binary(JobName) andalso is_map(State) andalso (Status == synced orelse Status == undefined orelse Status == cleaning orelse Status == cleaned orelse Status == refreshing orelse Status == refreshed orelse Status == idle orelse Status == error orelse Status == stopped) -> case imem_meta:read(?JOBDYN_TABLE, JobName) of [] -> - ok = imem_meta:write( - ?JOBDYN_TABLE, - #dperlNodeJobDyn{name = JobName, state = State, - status = Status, - statusTime = imem_meta:time_uid()}); - [#dperlNodeJobDyn{state=OldState, status=OldStatus, statusTime = OTime} = J] -> + ok = imem_meta:write(?JOBDYN_TABLE, + #dperlNodeJobDyn{ name=JobName, state=State + , status=Status + , statusTime=imem_meta:time_uid()}); + [#dperlNodeJobDyn{state=OldState, status=OldStatus, statusTime=OTime} = J] -> NewState = maps:merge(OldState,State), TimeDiff = imem_datatype:sec_diff(OTime), - if NewState /= OldState orelse (OldStatus == error andalso Status /= idle) - orelse (OldStatus /= error andalso Status /= OldStatus) - orelse (OldStatus == Status andalso Status == idle) - orelse TimeDiff > 1 -> - ok = imem_meta:write( - ?JOBDYN_TABLE, - J#dperlNodeJobDyn{state = NewState, status = Status, - statusTime = imem_meta:time_uid()}); + if + NewState /= OldState orelse 
(OldStatus == error andalso Status /= idle) + orelse (OldStatus /= error andalso Status /= OldStatus) + orelse (OldStatus == Status andalso Status == idle) + orelse TimeDiff > 1 -> + ok = imem_meta:write(?JOBDYN_TABLE, + J#dperlNodeJobDyn{ state=NewState, status=Status + , statusTime=imem_meta:time_uid()}); true -> ok end end. @@ -387,9 +408,8 @@ get_last_state(JobName) when is_binary(JobName) -> get_last_audit_time(JobName) -> case get_last_state(JobName) of - #{lastAuditTime := LastAuditTime} -> - LastAuditTime; - _ -> {0,0,0} + #{lastAuditTime := LastAuditTime} -> LastAuditTime; + _ -> ?EPOCH end. count_sibling_jobs(Module, Channel) -> @@ -564,6 +584,8 @@ safe_json_map(Value) when is_binary(Value) -> DecodedValue when is_list(DecodedValue) -> DecodedValue end. +%% sort lists inside a map recursively so that the compare result +%% does not depend on list order -spec normalize_map(map()) -> map(). normalize_map(Map) when is_map(Map)-> maps:map( @@ -719,10 +741,8 @@ activity_logger(StatusCtx, Name, Extra) -> -spec url_enc_params(map()) -> binary(). url_enc_params(Params) -> EParams = maps:fold( - fun(K, {enc, V}, Acc) -> - ["&", K, "=", http_uri:encode(V) | Acc]; - (K, V, Acc) -> - ["&", K, "=", V | Acc] + fun(K, {enc, V}, Acc) -> ["&", K, "=", http_uri:encode(V) | Acc]; + (K, V, Acc) -> ["&", K, "=", V | Acc] end, [], Params), erlang:iolist_to_binary([tl(EParams)]). diff --git a/src/dperl/dperl_strategy_scr.erl b/src/dperl/dperl_strategy_scr.erl index 857037aa..b5c303ea 100644 --- a/src/dperl/dperl_strategy_scr.erl +++ b/src/dperl/dperl_strategy_scr.erl @@ -15,49 +15,70 @@ % check if source data is currently acessible -callback connect_check_src(scrState()) -> - {ok, scrState()} | {error, any()} | {error, any(), scrState()}. + {ok, scrState()} | {error, any()} | {error, any(), scrState()}. 
% get a key list (limited in length) of recent changes on source side -% often achieved by scanning an audit log after last checked timestamp (from state) -% basis for sync cycle (differential change provisioning) -% for normal cleanup, a list of (possibly) dirty keys is returned -% for c/r jobs, a list of (possibly) dirty KV pairs is returned +% often achieved by scanning an audit log after last checked timestamp (from state). +% Basis for sync cycle (differential change provisioning) +% For normal cleanup, a list of (possibly) dirty keys is returned +% For c/r jobs, a list of (possibly) dirty KV pairs is returned where V can +% stand for (choice of the callback module and more precisely typed there): +% - a scalar value +% - a value map (usually representing a JSON part) +% - some hash of the value or KV pair (e.g. used in copy-jobs) +% - a structured value, e.g. {Content,Meta} +% Structured values can contain properties which should be ignored in +% an overridden compare function is_equal() -callback get_source_events(scrState(), scrBatchSize()) -> - {ok, scrAnyKeys(), scrState()} | - {ok, scrAnyKeyVals(), scrState()} | - {ok, sync_complete, scrState()} | - {error, scrAnyKey()}. + {ok, scrAnyKeys(), scrState()} | + {ok, scrAnyKeyVals(), scrState()} | + {ok, sync_complete, scrState()} | + {error, scrAnyKey()}. % check if destination data is currently acessible -callback connect_check_dst(scrState()) -> - {ok, scrState()} | {error, any()} | {error, any(), scrState()}. + {ok, scrState()} | {error, any()} | {error, any(), scrState()}. % fetch one item from source (if it exists) --callback fetch_src(scrAnyKey(), scrState()) -> ?NOT_FOUND | scrAnyVal(). +% return scrAnyVal() for cleanup +% return scrAnyKeyVal() for c/r +-callback fetch_src(scrAnyKey() | scrAnyKeyVal() , scrState()) -> + ?NOT_FOUND | scrAnyVal() | scrAnyKeyVal(). % fetch one item from destination (if it exists) --callback fetch_dst(scrAnyKey(), scrState()) -> ?NOT_FOUND | scrAnyVal(). 
+% return scrAnyVal() for cleanup +% return scrAnyKeyVal() for c/r +-callback fetch_dst(scrAnyKey() | scrAnyKeyVal(), scrState()) -> + ?NOT_FOUND | scrAnyVal() | scrAnyKeyVal(). % delete one item from destination (if it exists) -% first element in result is true if a delete was unnecessary -% callback is responsible for deciding if no_op delete is an error or not --callback delete_dst(scrAnyKey(), scrState()) -> {scrSoftError(), scrState()}. +% scrSoftError() = true signals that a soft error happened which is skipped without throwing +% leaving a chance to fix it in one of the next cycles +-callback delete_dst(scrAnyKey() | scrAnyKeyVal(), scrState()) -> + {scrSoftError(), scrState()}. % insert one item to destination (which is assumed to not exist) -% if the callback is able to do an update instead of an insert it can do it responsibly. --callback insert_dst(scrAnyKey(), scrAnyVal(), scrState()) -> {scrSoftError(), scrState()}. +% scrAnyVal() is used to insert the value, not the value part of scrAnyKeyVal() +% scrSoftError() = true signals that a soft error happened which is skipped without throwing +% leaving a chance to fix it in one of the next cycles +-callback insert_dst(scrAnyKey() | scrAnyKeyVal(), scrAnyVal(), scrState()) -> + {scrSoftError(), scrState()}. % update one item in destination (which is assumed to exist) -% if the callback is able to do an insert instead of an update it can do it responsibly. -% ATTENTION: for c/r jobs the first parameter may be a KV Pair from which only the key is used --callback update_dst(scrAnyKey()|scrAnyKeyVal(), scrAnyVal(), scrState()) -> {scrSoftError(), scrState()}. +% scrAnyVal() is used to change the value, not the value part of scrAnyKeyVal() +% scrSoftError() = true signals that a soft error happened which is skipped without throwing +% leaving a chance to fix it in one of the next cycles +-callback update_dst(scrAnyKey() | scrAnyKeyVal(), scrAnyVal(), scrState()) -> + {scrSoftError(), scrState()}. 
% allow the callback implementation act upon an error or warning message -% result is ignored, used for debugging only --callback report_status(scrAnyKey(), scrStatus(), scrState()) -> ok | no_op | {error, term()}. +% result is ignored and used for debugging only +-callback report_status(scrAnyKey() | scrAnyKeyVal(), scrErrorInfo(), scrState()) -> + ok | no_op | {error, term()}. % execute one more refresh cycle with limited block size --callback do_refresh(scrState(), scrBatchSize()) -> {{ok, scrState()} | {ok, finish, scrState() | error, any()}}. +-callback do_refresh(scrState(), scrBatchSize()) -> + {{ok, scrState()} | {ok, finish, scrState() | error, any()}}. % optional callbacks @@ -66,7 +87,8 @@ LastSuccess::ddTimestamp(), BatchInterval::scrMsecInterval(), % delay between cleanup batches CycleInterval::scrMsecInterval(), % delay between cleanup cycles - scrState()) -> true | false. + scrState()) -> + true | false. % override callback for refresh execution permission -callback should_refresh( LastAttempt::ddTimestamp(), @@ -74,10 +96,12 @@ BatchInterval::scrMsecInterval(), % delay between refresh batches CycleInterval::scrMsecInterval(), % delay between refresh cycles scrHoursOfDay(), - scrState()) -> true | false. + scrState()) -> + true | false. % override for value compare function --callback is_equal(scrAnyKey(), scrAnyVal(), scrAnyVal(), scrState()) -> true | false. +-callback is_equal(scrAnyKey() | scrAnyKeyVal(), scrAnyVal(), scrAnyVal(), scrState()) -> + true | false. % override for destination channel insert/update/delete (final data change) % only used for protected configurations (reversing the direction of data flow) @@ -99,60 +123,76 @@ update_channel/5, finalize_src_events/1, should_sync_log/1]). -% execute simple cleanup for next batch of keys --callback do_cleanup( scrState(), CleanupBulkCount::scrBatchSize()) -> +% execute simple cleanup for accumulated list of dirty keys (or KVPs in case of c/r). +% Must decide if cleanup is finished. 
+-callback do_cleanup( scrState(), scrBatchSize()) -> {ok, scrState()} | {ok, finish, scrState()} | {error, term(), scrState()} | {error, term()}. -% execute cleanup for found differences (Deletes and Inserts) --callback do_cleanup( Deletes::scrAnyKeys() | scrAnyKeyVal(), - Inserts::scrAnyKeys() | scrAnyKeyVal(), - SearchingMatch::boolean(), % NextLastKey == MinKey +% prepare cleanup sync for found differences (Deletes and Inserts) +% by adding these keys to the sync event list in the state (dirty Keys). +% These (potentially) 'dirty' keys will be dealt with in the following sync cycle +-callback do_cleanup( Deletes::scrAnyKeys(), + Inserts::scrAnyKeys(), + IsCycleComplete::boolean(), % NextLastKey == MinKey scrState()) -> - {ok, scrState()} | {ok, finish, scrState()} | {error, term(), scrState()} | {error, term()}. - -% execute cleanup/refresh for found differences (Deletes, Inserts and value Diffs) --callback do_cleanup( Deletes::scrAnyKeys() | scrAnyKeyVal(), - Inserts::scrAnyKeys() | scrAnyKeyVal(), - Diffs::scrAnyKeys() | scrAnyKeyVal(), - SearchingMatch::boolean(), % NextLastKey == MinKey + {ok, scrState()} | + {ok, finish, scrState()} | + {error, term(), scrState()} | + {error, term()}. + +% prepare cleanup/refresh sync for found differences (Deletes, Diffs, Inserts) +% by adding these Keys to the sync event list in the state (dirty Keys) +% These (potentially) 'dirty' keys will be dealt with in the following sync cycle +-callback do_cleanup( Deletes::scrAnyKeys(), + Inserts::scrAnyKeys(), + Diffs::scrAnyKeys(), + IsCycleComplete::boolean(), % NextLastKey == MinKey scrState()) -> - {ok, scrState()} | {ok, finish, scrState()} | {error, term(), scrState()} | {error, term()}. 
- -% bulk load one batch of keys (for cleanup) or kv-pairs (cleanup/refresh) from source -% a callback module implementing this and load_dst_after_key signals that it wants -% to fully control the cleanup/refresh procedure --callback load_src_after_key( LastKeySeen::scrAnyKey() | scrAnyKeyVal(), - scrBatchSize(), - scrState()) -> - {ok, scrAnyKeys(), scrState()} | {ok, scrAnyKeyVals(), scrState()} | {error, term(), scrState()}. - -% bulk load one batch of keys (for cleanup) or kv-pairs (cleanup/refresh) from destination -% a callback module implementing this and load_src_after_key signals that it wants -% to fully control the cleanup/refresh procedure --callback load_dst_after_key( LastKeySeen::scrAnyKey() | scrAnyKeyVal(), - scrBatchSize(), - scrState()) -> - {ok, scrAnyKeys(), scrState()} | {ok, scrAnyKeyVals(), scrState()} | {error, any(), scrState()}. - --optional_callbacks([ do_cleanup/2 - , do_cleanup/4 - , do_cleanup/5 - , load_src_after_key/3 - , load_dst_after_key/3 + {ok, scrState()} | + {ok, finish, scrState()} | + {error, term(), scrState()} | + {error, term()}. + +% bulk load one batch of keys (for cleanup) or kv-pairs (cleanup/refresh) from source. +% up to scrBatchSize() existing keys must be returned in key order. +% A callback module implementing this and also the load_dst_after_key callback signals that it wants +% to do override source loading for cleanup or c/r combined processing. +% Returning less than scrBatchSize() items does not prevent further calls of this function. +% If called again in same cycle, {ok, [], scrState()} must be returned. +-callback load_src_after_key(LastSeen::scrAnyKey()|scrAnyKeyVal(), scrBatchSize(), scrState()) -> + {ok, scrAnyKeys(), scrState()} | + {ok, scrAnyKeyVals(), scrState()} | + {error, term(), scrState()}. + +% bulk load one batch of kv-pairs for combined cleanup/refresh from destination. 
+% A callback module implementing this and load_src_after_key signals that it wants +% to do a cleanup/refresh combined processing. +% Returning less than scrBatchSize() items does not prevent further calls of this function. +% If called again in same cycle, {ok, [], scrState()} must be returned. +-callback load_dst_after_key(LastSeen::scrAnyKey()|scrAnyKeyVal(), scrBatchSize(), scrState()) -> + {ok, scrAnyKeys(), scrState()} | + {ok, scrAnyKeyVals(), scrState()} | + {error, term(), scrState()}. + +-optional_callbacks([ do_cleanup/2 % simple cleanup (or simple cleanup/refresh) + , do_cleanup/4 % cleanup with inserts and deletes only (no updates) + , do_cleanup/5 % cleanup/refresh with updates on KV-Pairs + , load_src_after_key/3 % overrides cleanup or c/r source loading + , load_dst_after_key/3 % overrides cleanup or c/r destination loading ]). -% chunked cleanup context --record(cleanup_ctx,{ srcKeys :: scrAnyKeys()| scrAnyKeyVal() % ????? - , srcCount :: integer() - , dstKeys :: scrAnyKeys()| scrAnyKeyVal() % ????? - , dstCount :: integer() +% chunked cleanup context where scrAnyKeyVal() types are used for c/r combination +-record(cleanup_ctx,{ srcKeys :: scrAnyKeys()| scrAnyKeyVal() + , srcCount :: integer() % length(srcKeys) + , dstKeys :: scrAnyKeys()| scrAnyKeyVal() + , dstCount :: integer() % length(dstKeys) , bulkCount :: scrBatchSize() - , minKey :: scrAnyKey() | scrAnyKeyVal() % ????? - , maxKey :: scrAnyKey() | scrAnyKeyVal() % ????? - , lastKey :: scrAnyKey() | scrAnyKeyVal() % ????? - , deletes = [] :: scrAnyKeys() | scrAnyKeyVals() - , inserts = [] :: scrAnyKeys() | scrAnyKeyVals() - , differences = [] :: scrAnyKeys() | scrAnyKeyVals() + , minKey :: scrAnyKey() | scrAnyKeyVal() + , maxKey :: scrAnyKey() | scrAnyKeyVal() + , lastKey :: scrAnyKey() | scrAnyKeyVal() + , deletes = [] :: scrAnyKeys() % Keys to be deleted + , inserts = [] :: scrAnyKeys() % Keys to be inserted + , differences = [] :: scrAnyKeys() % Keys to modify values }). 
% Debug macros @@ -301,7 +341,7 @@ execute(cleanup, Mod, Job, State, #{cleanup:=true} = Args) -> LastAttempt =< LastSuccess -> ?JInfo("Starting cleanup cycle"), case Args of - #{stats := #{cleanup_count := CC} = Stats} -> + #{stats := #{cleanup_count:=CC} = Stats} -> Args#{stats => Stats#{cleanup_count => CC + 1}}; Args -> Stats = maps:get(stats, Args, #{}), @@ -309,7 +349,7 @@ execute(cleanup, Mod, Job, State, #{cleanup:=true} = Args) -> end; true -> case Args of - #{stats := #{cleanup_count := CC} = Stats} -> + #{stats := #{cleanup_count:=CC} = Stats} -> Args#{stats => Stats#{cleanup_count => CC + 1}}; Args -> ?JInfo("Resuming cleanup cycle"), @@ -341,31 +381,31 @@ execute(cleanup, Mod, Job, State, #{cleanup:=true} = Args) -> % launch simple cleanup cycle using do_cleanup/2 [State1, CleanupBulkCount]; true -> - % launch cleanup/refresh combined processing using do_cleanup/5 or do_cleanup/4 + % launch cleanup processing using do_cleanup/4 on scalar keys + % launch cleanup/refresh combined processing using do_cleanup/5 on kv-pairs + % note: Key also used here in the sense of kv-pairs (KVP) where the value can itself + % be structured (e.g. {Content::map(), Meta::map()} in Office365 contact sync) #{minKey:=MinKey, maxKey:=MaxKey, lastKey:=LastKey} = CleanupState, - Ctx = #cleanup_ctx{ minKey=MinKey, maxKey=MaxKey - , lastKey=LastKey, bulkCount=CleanupBulkCount}, + Ctx = #cleanup_ctx{ minKey=MinKey, maxKey=MaxKey, lastKey=LastKey + , bulkCount=CleanupBulkCount}, {RefreshCollectResult, State2} = cleanup_refresh_collect(Mod,Ctx,State1), Deletes = maps:get(deletes,RefreshCollectResult), Inserts = maps:get(inserts,RefreshCollectResult), Diffs = maps:get(differences,RefreshCollectResult), NextLastKey = maps:get(lastKey,RefreshCollectResult), - % update last key ToDO: This is UGLY. To be cast into functions !!!! 
- case dperl_dal:select( - ?JOBDYN_TABLE, - [{#dperlNodeJobDyn{name=Job,_='_'},[],['$_']}]) of - {[#dperlNodeJobDyn{state = #{cleanup := OldCleanupState} - = NodeJobDynState}], true} - when is_map(OldCleanupState) -> + % update last key ToDo: This is UGLY. To be cast into functions !!!! + MatchSpec = [{#dperlNodeJobDyn{name=Job,_='_'},[],['$_']}], + case dperl_dal:select(?JOBDYN_TABLE, MatchSpec) of + {[#dperlNodeJobDyn{state = #{cleanup:=OldState} = NodeJobDyn}], true} + when is_map(OldState) -> dperl_dal:update_job_dyn( Job, - NodeJobDynState#{ + NodeJobDyn#{ cleanup => (case Args1 of - #{stats := #{cleanup_count := CC2}} -> - OldCleanupState#{count => CC2}; - Args1 -> OldCleanupState - end)#{lastKey => NextLastKey}}); + #{stats := #{cleanup_count:=CC2}} -> OldState#{count=>CC2}; + Args1 -> OldState + end)#{lastKey=>NextLastKey}}); _ -> ok end, cleanup_log("Orphan", Deletes), @@ -511,18 +551,24 @@ execute(finish, Mod, Job, State, Args) -> ?RESTART_AFTER(?CYCLE_ALWAYS_WAIT(Mod, Job), Args), State. + +%% evaluate cycle status by -spec get_cycle_state(scrCycle(), jobName()) -> scrCycleState(). -get_cycle_state(Cycle, Job) when (Cycle==cleanup orelse Cycle==refresh) andalso is_binary(Job) -> +get_cycle_state(Cycle, Name) when (Cycle==cleanup orelse Cycle==refresh) andalso is_binary(Name) -> maps:merge( if - Cycle==cleanup -> #{minKey => -1, maxKey => <<255>>, lastKey => 0}; - true -> #{} + Cycle==cleanup -> + #{minKey=>?SCR_MIN_KEY, maxKey=>?SCR_MAX_KEY, lastKey=>?SCR_INIT_KEY}; + true -> + #{} end, case dperl_dal:select( ?JOBDYN_TABLE, - [{#dperlNodeJobDyn{name=Job,state='$1',_='_'},[],['$1']}]) of - {[#{Cycle:=CycleState}], true} when is_map(CycleState) -> CycleState; - {_, true} -> #{lastAttempt => ?EPOCH, lastSuccess => ?EPOCH} + [{#dperlNodeJobDyn{name=Name, state='$1', _='_'}, [], ['$1']}]) of + {[#{Cycle:=CycleState}], true} when is_map(CycleState) -> + CycleState; + {_, true} -> + #{lastAttempt => ?EPOCH, lastSuccess => ?EPOCH} end). 
% update dperlNodeJobDyn table according to planned action, reset statistics @@ -539,7 +585,7 @@ set_cycle_state(Cycle, Job, Action, Stats0) {[#dperlNodeJobDyn{state=#{Cycle:=CycleState0}} = NJD], true} when is_map(CycleState0) -> {NJD, CycleState0}; {[#dperlNodeJobDyn{} = NJD], true} -> - {NJD, #{lastAttempt=>imem_meta:time(), lastSuccess=>?EPOCH}} % ToDo: Should it be imem_meta:time() ???? + {NJD, #{lastAttempt=>imem_meta:time(), lastSuccess=>?EPOCH}} end, {CycleState2, Stats1} = case maps:get(count, Stats0, '$not_found') of '$not_found' -> {CycleState1, Stats0}; @@ -561,35 +607,50 @@ set_cycle_state(Cycle, Job, Action, Stats0) % default callbacks +%% default scheduling delays according to configured -spec should_cleanup(ddTimestamp(), ddTimestamp(), - scrMsecInterval(), scrMsecInterval()) -> true | false. + scrBatchInterval(), scrCycleInterval()) -> true | false. should_cleanup(LastAttempt, LastSuccess, BatchInterval, CycleInterval) -> - if LastAttempt > LastSuccess -> - imem_datatype:msec_diff(LastAttempt) > BatchInterval; + if LastAttempt > LastSuccess -> + % wait a short time between cleanup batches (100 items typically) + imem_datatype:msec_diff(LastAttempt) > BatchInterval; true -> - imem_datatype:msec_diff(LastSuccess) > CycleInterval + % wait a longer time between full cleanup cycles (all data) + imem_datatype:msec_diff(LastSuccess) > CycleInterval end. --spec should_refresh(ddTimestamp(), ddTimestamp(), scrMsecInterval(), - scrMsecInterval(), scrHoursOfDay()) -> true | false. +-spec should_refresh(ddTimestamp(), ddTimestamp(), scrBatchInterval(), + scrCycleInterval(), scrHoursOfDay()) -> true | false. 
should_refresh(LastAttempt, LastSuccess, BatchInterval, Interval, Hours) -> if LastAttempt > LastSuccess -> - imem_datatype:msec_diff(LastAttempt) > BatchInterval; + % wait a short time between refresh batches (100 items typically) + imem_datatype:msec_diff(LastAttempt) > BatchInterval; true -> - case imem_datatype:msec_diff(LastSuccess) > Interval of + % wait a longer time between full refresh cycles (all data) + case imem_datatype:msec_diff(LastSuccess) > Interval of false -> false; true -> - if length(Hours) > 0 -> - {Hour,_,_} = erlang:time(), - case lists:member(Hour, Hours) of - true -> true; - _ -> false - end; - true -> true - end - end + if + length(Hours) > 0 -> + %% only start new cycles during listed hours + {Hour,_,_} = erlang:time(), + case lists:member(Hour, Hours) of + true -> true; + _ -> false + end; + true -> + % start new cycle after the configured delay + true + end + end end. +%% implements a comparer for two objects with the same key. +%% Are the two object values equal (in the sense that they don't +%% need synchronisation)? +%% Default semantics used here: exact match after sorting list components +%% The callback module can implement an override comparer which tolerates certain +%% differences. -spec is_equal(scrAnyKey(), scrAnyVal(), scrAnyVal(), scrState()) -> boolean(). 
is_equal(_Key, S, S, _State) -> true; is_equal(_Key, S, D, _State) when is_map(S), is_map(D) -> @@ -630,16 +691,16 @@ process_events([], Mod, State, _ShouldLog, IsError) -> end; process_events([Key | Keys], Mod, State, ShouldLog, IsError) -> {NewIsError, NewState} = case {Mod:fetch_src(Key, State), Mod:fetch_dst(Key, State)} of - {S, S} -> + {S, S} -> %% nothing to do Mod:report_status(Key, no_op, State), - {IsError, State}; %% nothing to do + {IsError, State}; {{protected, _}, ?NOT_FOUND} -> % pusher protection ?JError("Protected ~p is not found on target", [Key]), Error = <<"Protected key is not found on target">>, Mod:report_status(Key, Error, State), dperl_dal:job_error(Key, <<"sync">>, <<"process_events">>, Error), {true, State}; - {{protected, S}, D} -> % pusher protection + {{protected, S}, D} -> % pusher protection execute_prov_fun("Protected", Mod, update_channel, [Key, true, S, D, State], ShouldLog, IsError, check); {{protected, IsSamePlatform, S}, D} -> % puller protection execute_prov_fun("Protected", Mod, update_channel, [Key, IsSamePlatform, S, D, State], ShouldLog, IsError, check); @@ -709,8 +770,10 @@ execute_prov_fun(Op, Mod, Fun, Args, ShouldLog, IsError, check) -> end. -spec cleanup_refresh_collect(jobModule(), #cleanup_ctx{}, scrState()) -> - #{deletes => scrAnyKeys(), inserts => scrAnyKeys(), lastKey => scrAnyKey()}. + #{deletes => scrAnyKeyVals(), inserts => scrAnyKeyVals(), lastKey => scrAnyKeyVal()}. cleanup_refresh_collect(Mod, CleanupCtx, State) -> + % note: Key used here in the sense of key-value-pair (KVP) where the value can itself + % be structured (e.g. 
{Content::map(), Meta::map()} in Office365 contact sync) #cleanup_ctx{minKey=MinKey, maxKey=MaxKey, lastKey=LastKey, bulkCount=BulkCnt} = CleanupCtx, CurKey = if LastKey =< MinKey -> MinKey; % throw to cycle start if getting @@ -724,7 +787,7 @@ cleanup_refresh_collect(Mod, CleanupCtx, State) -> ?JError("cleanup failed at load_src_after_key : ~p", [Error]), dperl_dal:job_error(<<"cleanup">>, <<"load_src_after_key">>, Error), error({step_failed, State1}); - SKeys -> {SKeys, State} + SKeys -> {SKeys, State} % deprecated simple API which returns the kv-pairs only end, {DstKeys, State4} = case Mod:load_dst_after_key(CurKey, BulkCnt, State2) of @@ -733,52 +796,55 @@ cleanup_refresh_collect(Mod, CleanupCtx, State) -> ?JError("cleanup failed at load_dst_after_key : ~p", [Error1]), dperl_dal:job_error(<<"cleanup">>, <<"load_dst_after_key">>, Error1), error({step_failed, State3}); - DKeys -> {DKeys, State2} + DKeys -> {DKeys, State2} % deprecated simple API which returns the kv-pairs only end, - {cleanup_refresh_compare(CleanupCtx#cleanup_ctx{ - srcKeys = SrcKeys, srcCount = length(SrcKeys), - dstKeys = DstKeys, dstCount = length(DstKeys), lastKey = CurKey}), State4}. - --spec cleanup_refresh_compare(#cleanup_ctx{}) -> - #{deletes=>scrAnyKeys(), differences=>scrAnyKeys() - , inserts=>scrAnyKeys(), lastKey=>scrAnyKey()}. -cleanup_refresh_compare(#cleanup_ctx{ - srcKeys=SrcKeys, dstKeys=[], deletes=Deletes, - inserts=Inserts, minKey=MinKey, differences=Diffs, - dstCount=DstCount, bulkCount=BulkCnt, srcCount=SrcCount}) + {cleanup_refresh_compare(CleanupCtx#cleanup_ctx{ srcKeys=SrcKeys, srcCount=length(SrcKeys) + , dstKeys=DstKeys, dstCount=length(DstKeys) + , lastKey=CurKey}), State4}. + +-spec cleanup_refresh_compare(#cleanup_ctx{}) -> #{ deletes=>scrAnyKeys(), differences=>scrAnyKeys() + , inserts=>scrAnyKeys(), lastKey=>scrAnyKey()}. 
+cleanup_refresh_compare(#cleanup_ctx{ srcKeys=SrcKeys, dstKeys=[] + , deletes=Deletes,inserts=Inserts, differences=Diffs + , srcCount=SrcCount, dstCount=DstCount, bulkCount=BulkCnt + , minKey=MinKey}) when DstCount < BulkCnt, SrcCount < BulkCnt -> - Remaining = fetch_keys(SrcKeys), + Remaining = take_keys(SrcKeys), #{deletes=>Deletes, differences=>Diffs, inserts=>Inserts++Remaining, lastKey=>MinKey}; -cleanup_refresh_compare(#cleanup_ctx{srcKeys=SrcKeys, dstKeys=[], deletes=Deletes - , dstCount=DstCount, inserts=Inserts, differences=Diffs}) +cleanup_refresh_compare(#cleanup_ctx{ srcKeys=SrcKeys, dstKeys=[] + , deletes=Deletes, inserts=Inserts, differences=Diffs + , dstCount=DstCount}) when DstCount == 0 -> - Remaining = fetch_keys(SrcKeys), + Remaining = take_keys(SrcKeys), #{deletes=>Deletes, differences=>Diffs, inserts=>Inserts++Remaining, lastKey=>last_key(SrcKeys)}; -cleanup_refresh_compare(#cleanup_ctx{dstKeys=[], deletes=Deletes, differences=Diffs, - inserts=Inserts, lastKey=LK}) -> +cleanup_refresh_compare(#cleanup_ctx{ dstKeys=[] + , deletes=Deletes, inserts=Inserts, differences=Diffs + , lastKey=LK}) -> #{deletes=>Deletes, differences=>Diffs, inserts=>Inserts, lastKey=>LK}; -cleanup_refresh_compare(#cleanup_ctx{ srcCount=SrcCount, dstKeys=DstKeys,bulkCount=BulkCnt - , minKey=MinKey, srcKeys=[], deletes=Deletes - , inserts=Inserts, dstCount=DstCount, differences=Diffs}) +cleanup_refresh_compare(#cleanup_ctx{ srcKeys=[], dstKeys=DstKeys, minKey=MinKey + , deletes=Deletes, inserts=Inserts, differences=Diffs + , srcCount=SrcCount, dstCount=DstCount, bulkCount=BulkCnt}) when SrcCount < BulkCnt, DstCount < BulkCnt -> - Remaining = fetch_keys(DstKeys), + Remaining = take_keys(DstKeys), #{deletes=>Deletes++Remaining, differences=>Diffs, inserts=>Inserts, lastKey=>MinKey}; -cleanup_refresh_compare(#cleanup_ctx{ srcKeys=[], deletes=Deletes, inserts=Inserts - , dstKeys=DstKeys, differences=Diffs, srcCount=SrcCount}) +cleanup_refresh_compare(#cleanup_ctx{ srcKeys=[], 
dstKeys=DstKeys + , deletes=Deletes, inserts=Inserts, differences=Diffs + , srcCount=SrcCount}) when SrcCount == 0 -> - Remaining = fetch_keys(DstKeys), + Remaining = take_keys(DstKeys), #{deletes=>Deletes++Remaining, differences=>Diffs, inserts=>Inserts, lastKey=>last_key(DstKeys)}; -cleanup_refresh_compare(#cleanup_ctx{ srcKeys=[], deletes=Deletes, differences=Diffs - , inserts=Inserts, lastKey=LK}) -> +cleanup_refresh_compare(#cleanup_ctx{ srcKeys=[] + , deletes=Deletes, inserts=Inserts, differences=Diffs + , lastKey=LK}) -> #{deletes=>Deletes, differences=>Diffs, inserts=>Inserts, lastKey=>LK}; cleanup_refresh_compare(#cleanup_ctx{srcKeys=[K|SrcKeys], dstKeys=[K|DstKeys]} = CleanupCtx) -> cleanup_refresh_compare(CleanupCtx#cleanup_ctx{ srcKeys=SrcKeys, dstKeys=DstKeys - , lastKey = last_key([K])}); -cleanup_refresh_compare(#cleanup_ctx{ srcKeys=[{K, _} | SrcKeys], dstKeys=[{K, _} | DstKeys] + , lastKey=last_key([K])}); +cleanup_refresh_compare(#cleanup_ctx{ srcKeys=[{K, _}|SrcKeys], dstKeys=[{K, _}|DstKeys] , differences=Diffs} = CleanupCtx) -> cleanup_refresh_compare(CleanupCtx#cleanup_ctx{ srcKeys=SrcKeys, dstKeys=DstKeys - , lastKey=K, differences=[K | Diffs]}); -cleanup_refresh_compare(#cleanup_ctx{ srcKeys=[SK|SrcKeys], dstKeys=[DK | DstKeys] + , lastKey=K, differences=[K|Diffs]}); +cleanup_refresh_compare(#cleanup_ctx{ srcKeys=[SK|SrcKeys], dstKeys=[DK|DstKeys] , inserts=Inserts, deletes=Deletes} = CleanupCtx) -> case {last_key([SK]), last_key([DK])} of {K1, K2} when K1 < K2 -> cleanup_refresh_compare( @@ -787,12 +853,12 @@ cleanup_refresh_compare(#cleanup_ctx{ srcKeys=[SK|SrcKeys], dstKeys=[DK | DstKey CleanupCtx#cleanup_ctx{dstKeys=DstKeys, deletes=[K2|Deletes], lastKey=K2}) end. --spec fetch_keys(scrAnyKeys()) -> scrAnyKeys(). -fetch_keys([]) -> []; -fetch_keys([{_, _} | _] = KVs) -> [K || {K, _} <- KVs]; -fetch_keys(Keys) -> Keys. +-spec take_keys(scrAnyKeys()|scrAnyKeyVals()) -> scrAnyKeys(). 
+take_keys([]) -> []; +take_keys([{_, _} | _] = KVs) -> [K || {K, _} <- KVs]; +take_keys(Keys) -> Keys. --spec last_key(scrAnyKeys()) -> scrAnyKey(). +-spec last_key(scrAnyKeys()|scrAnyKeyVals()) -> scrAnyKey(). last_key([{_, _} | _] = KVs) -> element(1, lists:last(KVs)); last_key(Keys) -> lists:last(Keys). @@ -830,8 +896,7 @@ cleanup_refresh_compare_test() -> DstKeys = lists:usort([rand:uniform(3000) || _ <- lists:seq(1, DstCount)]), {#{deletes := Dels, inserts := Ins}, _} = cleanup_refresh_collect(?MODULE, - #cleanup_ctx{minKey = -1, maxKey = <<255>>, - lastKey = 0, bulkCount = BulkCnt}, + #cleanup_ctx{minKey=?SCR_MIN_KEY, maxKey=?SCR_MAX_KEY, lastKey=?SCR_INIT_KEY, bulkCount=BulkCnt}, {SrcKeys, DstKeys}), Cleaned = lists:sort(lists:foldr(fun(K, Acc) -> case lists:member(K, Dels) of @@ -844,18 +909,18 @@ cleanup_refresh_compare_test() -> complete_cleanup_refresh(AllSrcKeys, AllDstKeys) -> BulkCnt = 100, - MaxKey = <<255>>, - Ctx = #cleanup_ctx{minKey = -1, maxKey = MaxKey, lastKey = 0, - bulkCount = BulkCnt}, - #{deletes := Dels, differences := Diffs, inserts := Ins} = cleanup_refresh_loop(Ctx, 0, {AllSrcKeys, AllDstKeys}, #{}), + MaxKey = ?SCR_MAX_KEY, + Ctx = #cleanup_ctx{minKey=?SCR_MIN_KEY, maxKey=MaxKey, lastKey=?SCR_INIT_KEY, bulkCount=BulkCnt}, + #{deletes:=Dels, differences:=Diffs, inserts:=Ins} = + cleanup_refresh_loop(Ctx, ?SCR_INIT_KEY, {AllSrcKeys, AllDstKeys}, #{}), Cleaned = lists:usort(lists:foldr(fun(K, Acc) -> case lists:member(K, Dels) of true -> lists:delete(K, Acc); false -> Acc end - end, fetch_keys(AllDstKeys), Dels) ++ Ins), - ?assertEqual(Cleaned, lists:usort(fetch_keys(AllSrcKeys))), + end, take_keys(AllDstKeys), Dels) ++ Ins), + ?assertEqual(Cleaned, lists:usort(take_keys(AllSrcKeys))), Diffs1 = lists:usort(lists:foldl( fun({K, V}, Acc) -> case lists:keyfind(K, 1, AllDstKeys) of @@ -868,15 +933,14 @@ complete_cleanup_refresh(AllSrcKeys, AllDstKeys) -> ?assertEqual(Diffs1, lists:usort(Diffs)). 
complete_cleanup_refresh(AllSrcKeys, AllDstKeys, BulkCnt) -> - MaxKey = <<255>>, - Ctx = #cleanup_ctx{minKey = -1, maxKey = MaxKey, lastKey = 0, - bulkCount = BulkCnt}, + MaxKey = ?SCR_MAX_KEY, + Ctx = #cleanup_ctx{minKey=?SCR_MIN_KEY, maxKey=MaxKey, lastKey=?SCR_INIT_KEY, bulkCount=BulkCnt}, cleanup_refresh_collect(?MODULE, Ctx, {AllSrcKeys, AllDstKeys}). -cleanup_refresh_loop(_, -1, _, Acc) -> Acc; +cleanup_refresh_loop(_, ?SCR_MIN_KEY, _, Acc) -> Acc; cleanup_refresh_loop(Ctx, CurKey, AllKeys, Acc) -> - {#{deletes := Dels, differences := Diffs, inserts := Ins, lastKey := LastKey}, _} = - cleanup_refresh_collect(?MODULE, Ctx#cleanup_ctx{lastKey = CurKey}, AllKeys), + {#{deletes:=Dels, differences:=Diffs, inserts:=Ins, lastKey:=LastKey}, _} = + cleanup_refresh_collect(?MODULE, Ctx#cleanup_ctx{lastKey=CurKey}, AllKeys), NewAcc = Acc#{deletes => Dels ++ maps:get(deletes, Acc, []), differences => Diffs ++ maps:get(differences, Acc, []), inserts => Ins ++ maps:get(inserts, Acc, [])}, diff --git a/src/dperl/dperl_strategy_scr.hrl b/src/dperl/dperl_strategy_scr.hrl index 8cd95697..2e4e2bf0 100644 --- a/src/dperl/dperl_strategy_scr.hrl +++ b/src/dperl/dperl_strategy_scr.hrl @@ -13,17 +13,28 @@ -type scrChannel() :: binary(). -type scrDirection() :: push | pull. -type scrCycle() :: cleanup | refresh. % stateful phases (sync is stateless) --type scrCycleState() :: map(). % initially: #{lastAttempt => ?EPOCH, lastSuccess => ?EPOCH} +-type scrCycleState() :: map(). % behaviour overall state per scrCycle() + % initially for both: #{lastAttempt => ?EPOCH, lastSuccess => ?EPOCH} + % initially for cleanup: #{minKey=>?SCR_MIN_KEY, maxKey=>?SCR_MAX_KEY, lastKey=>?SCR_INIT_KEY}; + % initially for refresh: #{} + % ToDo: Are keys limited to non_neg_integer() / other Erlang types ???? -type scrPhase() :: sync | scrCycle(). -type scrBatchSize() :: integer(). --type scrMessage() :: binary(). --type scrStatus() :: no_op | {error,term()} | scrMessage(). +-type scrMessage() :: binary(). 
+-type scrErrorInfo() :: no_op | {error,term()} | scrMessage(). +-type scrDynStatus() :: map(). % what should be stored in dperlNodeJobDyn table for this job. -type scrOperation() :: finalize_src_events | no_log | string(). % "Protected"|"Deleted"|"Inserted"|"Updated". -type scrSoftError() :: true|false|idle. % idle only used in special case where the dst is not ready and we have to do an idle wait. % true signifies the one or more events in the sync cycle resulted in an error % false means all the events were processed successfully by the sync cycle -type scrMsecInterval() :: integer(). % delays in milli-seconds +-type scrBatchInterval() :: scrMsecInterval(). % delay from end of c or r batch to start of next +-type scrCycleInterval() :: scrMsecInterval(). % delay from end of c or r cycle to start of next -type scrHoursOfDay() :: [integer()]. % execute only on these hours, [] = any hour +-define(SCR_MIN_KEY, -1). % initial MinKey value. Real keys must be bigger (in Erlang sort order) +-define(SCR_MAX_KEY, <<255>>). % initial MaxKey value. Real keys must be smaller (in Erlang sort order) +-define(SCR_INIT_KEY, 0). % initial LastKey value. Real keys must be bigger (in Erlang sort order) + -endif. \ No newline at end of file diff --git a/src/dperl/jobs/dpjob_office_365.erl b/src/dperl/jobs/dpjob_office_365.erl index ea47af89..3cb0f5c5 100644 --- a/src/dperl/jobs/dpjob_office_365.erl +++ b/src/dperl/jobs/dpjob_office_365.erl @@ -1,21 +1,39 @@ -module(dpjob_office_365). +%% implements scr puller (and later also) pusher for Office356 contact data as a c/r job. +%% The KeyPrefix ["contacts","Office365"] can be altered to camouflage the nature of the data. +%% This also supports multiple Office365 synchronisations on the same table, if needed + +%% sync order: remKey() which is the binary contact Id on the remote side (cloud) +%% remKey() is injected into the value as the primary value of a META attribute with one META +%% object per sync job. 
Multiple cloud contact lists can be merged into one local table key range. +%% The intermediate table (could be an encrypted skvh table). It uses an index on remKeys() +%% for lookup and for cleanup scanning in remKey() order. + -include("../dperl.hrl"). -include("../dperl_strategy_scr.hrl"). -behavior(dperl_worker). -behavior(dperl_strategy_scr). --type locKey() :: [string()]. % local key of a contact, e.g. ["contact","My","Ah2hA77a"] --type locId() :: string(). % last item in locKey() is called local id e.g. "Ah2hA77a" --type locVal() :: map(). % local cvalue of a contact, converted to a map for processing -%-type locBin() :: binary(). % local cvalue of a contact in binary form (often stored like that) --type remKey() :: binary(). % remote key of a contact, called <<"id">> in Office365 --type remKeys():: [remKey()]. % list of remKey() type (e.g. DirtyKeys) --type remVal() :: map(). % remote value of a contact (relevant fields only) -%-type remBin() :: binary(). % remote value of a contract in raw binary JSON form --type meta() :: map(). % contact meta information with respect to this remote cloud +-type remKey() :: binary(). % remote key, <<"id">> in Office365 (cleanup sort order) +-type remKeys() :: [remKey()]. % remote keys +-type remVal() :: map(). % remote value of a contact (relevant fields only) +-type remMeta() :: map(). % contact meta information with respect to this remote cloud +-type remKVP() :: {remKey(), {remVal(), remMeta()}}. % remote key value pair +-type remKVPs() :: [remKVP()]. % list of remote key value pairs + +-type locKey() :: [string()]. % local key of a contact, e.g. ["contact","My","Ah2hA77a"] +-type locId() :: string(). % last item in locKey() is called local id e.g. "Ah2hA77a" +-type locVal() :: map(). % sync relevant part of local cvalue of a contact +-type locMeta() :: map(). % meta part of local cvalue of a contact +-type locKVP() :: {remKey(), {locVal(), locMeta()}}. % local key value pair +-type locKVPs() :: [locKVP()]. 
% local key value pairs +-type token() :: binary(). % OAuth access token (refreshed after 'unauthorized') + +-define(COMPARE_MIN_KEY, <<>>). % cleanup traversal by external id, scan starting after this key +-define(COMPARE_MAX_KEY, <<255>>). % scan ending at or beyond this key -define(OAUTH2_CONFIG(__JOB_NAME), ?GET_CONFIG(oAuth2Config, @@ -59,26 +77,32 @@ -define(CONTENT_ATTRIBUTES(__JOB_NAME), ?GET_CONFIG(contactAttributes, [__JOB_NAME], - [<<"businessPhones">>,<<"mobilePhone">> %,<<"title">> ,<<"personalNotes">> - ,<<"companyName">>,<<"emailAddresses">> % ,<<"middleName">>,<<"businessHomePage">> - ,<<"assistantName">>,<<"department">> % ,<<"children">>,<<"officeLocation">> - ,<<"profession">>,<<"givenName">>,<<"categories">>,<<"jobTitle">> % ,<<"nickName">>,<<"yomiGivenName">> - ,<<"surname">>,<<"imAddresses">>,<<"businessAddress">> % ,<<"spouseName">>,<<"yomiSurname">> - ,<<"manager">> % ,<<"generation">>,<<"initials">>,<<"displayName">> - % ,<<"homeAddress">>,<<"otherAddress">>,<<"homePhones">>,<<"fileAs">>,<<"yomiCompanyName">>,<<"birthday">> + [<<"companyName">> + ,<<"givenName">>,<<"surname">>,<<"jobTitle">>,<<"profession">> + %,<<"emailAddresses">>,<<"businessPhones">>,<<"mobilePhone">>,<<"homePhones">>,<<"imAddresses">> + %,<<"department">>,<<"manager">>,<<"assistantName">> + %,<<"businessAddress">> + %,<<"officeLocation">>,<<"businessHomePage">> + %,<<"displayName">>,<<"title">>,<<"middleName">>,<<"initials">>,<<"nickName">> + %,<<"birthday">>,<<"categories">>,<<"personalNotes">> + %,<<"spouseName">>,<<"children">>,<<"generation">> + %,<<"yomiSurname">>,<<"yomiGivenName">>,<<"yomiCompanyName">> + %,<<"homeAddress">>,<<"otherAddress">>,<<"fileAs">> ], "Attributes to be synced for Office365 contact data" ) ). 
-define(META_ATTRIBUTES(__JOB_NAME), - ?GET_CONFIG(contactAttributes, + ?GET_CONFIG(metaAttributes, [__JOB_NAME], [<<"id">> - ,<<"lastModifiedDateTime">> - ,<<"changeKey">> %,<<"parentFolderId">>,<<"createdDateTime">> + %,<<"lastModifiedDateTime">> + %,<<"changeKey">> + %,<<"parentFolderId">> + %,<<"createdDateTime">> ], - "Attributes used for Office365 contact change tracking" + "Meta attributes used for Office365 contact change tracking" ) ). @@ -95,28 +119,21 @@ % contacts graph api % https://docs.microsoft.com/en-us/graph/api/resources/contact?view=graph-rest-1.0 -%% remote item (single contact info in cache) --record(remItem, { remKey :: remKey() % remote key (id) - , meta :: meta() % META information (id, ...) - , content :: remVal() % relevant contact info to be synced - }). - %% scr processing state -record(state, { name :: jobName() , type = pull :: scrDirection() , channel :: scrChannel() % channel name , keyPrefix :: locKey() % key space prefix in channel , tokenPrefix :: locKey() % without id #token# - , token :: map() % token info as stored under #token# - , apiUrl :: binary() - , fetchUrl :: binary() - , dirtyKeys = [] :: remKeys() % needing insert/update/delete - , remItems = [] :: list(#remItem{}) % cache for cleanup / ToDo: remove - , isConnected = true :: boolean() + , token :: token() % access token binary + , apiUrl :: string() + , fetchUrl :: string() + , cycleBuffer = [] :: remKVPs() | remKVPs() % dirty buffer for one c/r cycle + , isConnected = true :: boolean() % fail unauthorized on first use , isFirstSync = true :: boolean() , isCleanupFinished = true :: boolean() , auditStartTime = {0,0} :: ddTimestamp() % UTC timestamp {Sec,MicroSec} - , template = ?NOT_FOUND :: ?NOT_FOUND|map() % empty contact with default values + , template = #{} :: locVal() % empty contact with default values , accountId :: ddEntityId() % data owner }). @@ -145,241 +162,328 @@ , get_key_prefix/1 ]). +-spec get_auth_config() -> map(). 
get_auth_config() -> ?OAUTH2_CONFIG(<<>>). +-spec get_auth_config(jobName()) -> map(). get_auth_config(JobName) -> ?OAUTH2_CONFIG(JobName). +-spec get_auth_token_key_prefix() -> locKey(). get_auth_token_key_prefix() -> ?OAUTH2_TOKEN_KEY_PREFIX(<<>>). +-spec get_auth_token_key_prefix(jobName()) -> locKey(). get_auth_token_key_prefix(JobName) -> ?OAUTH2_TOKEN_KEY_PREFIX(JobName). +-spec get_key_prefix() -> locKey(). get_key_prefix() -> ?KEY_PREFIX(<<>>). +-spec get_key_prefix(jobName()) -> locKey(). get_key_prefix(JobName) -> ?KEY_PREFIX(JobName). % determine the local id as the last piece of ckey (if available) -% or hash of remote id (if new to local store) -% this id is a string representing a hash of the remote id --spec local_id(locKey()) -> locId(). -local_id(Key) when is_list(Key) -> lists:last(Key). - +%-spec local_id(locKey()) -> locId(). +%local_id(Key) when is_list(Key) -> lists:last(Key). % calculate a new local id as a string representing the hash of the remote key (id) --spec new_local_id(remKey()) -> locKey(). +-spec new_local_id(remKey()) -> locId(). new_local_id(RemKey) when is_binary(RemKey) -> io_lib:format("~.36B",[erlang:phash2(RemKey)]). +-spec get_local_key(remKey(), jobName(), scrChannel(), locKey()) -> locKey() | ?NOT_FOUND. +get_local_key(Id, _Name, Channel, KeyPrefix) -> + Stu = {?CONTACT_INDEXID,Id}, + case dperl_dal:read_channel_index_key_prefix(Channel, Stu, KeyPrefix) of + [] -> ?NOT_FOUND; + [Key] -> Key % ToDo: Check/filter with META key = Name + end. + % convert list of remote values (already maps) to list of {Key,RemoteId,RemoteValue} triples % which serves as a lookup buffer of the complete remote state, avoiding sorting issues --spec format_remote_values_to_kv(remVal(), locKey(), jobName()) -> remVal(). -format_remote_values_to_kv(Values, KeyPrefix, JobName) -> - format_remote_values_to_kv(Values, KeyPrefix, JobName, []). 
- -format_remote_values_to_kv([], _KeyPrefix, _JobName, Acc) -> Acc; -format_remote_values_to_kv([Value|Values], KeyPrefix, JobName, Acc) -> - #{<<"id">> := RemoteId} = Value, - Key = KeyPrefix ++ [new_local_id(RemoteId)], - format_remote_values_to_kv(Values, KeyPrefix, JobName, [{Key,RemoteId,format_value(Value, JobName)}|Acc]). - -% format remote or local value by projecting it down to configured list of synced (meta + content) attributes -format_value(Value, JobName) when is_map(Value) -> - maps:with(?META_ATTRIBUTES(JobName)++?CONTENT_ATTRIBUTES(JobName), Value). +% -spec format_remote_values_to_kv(remVal(), locKey(), jobName()) -> remVal(). +% format_remote_values_to_kv(Values, KeyPrefix, JobName) -> +% format_remote_values_to_kv(Values, KeyPrefix, JobName, []). + +% format_remote_values_to_kv([], _KeyPrefix, _JobName, Acc) -> Acc; +% format_remote_values_to_kv([Value|Values], KeyPrefix, JobName, Acc) -> +% #{<<"id">> := RemoteId} = Value, +% Key = KeyPrefix ++ [new_local_id(RemoteId)], +% format_remote_values_to_kv(Values, KeyPrefix, JobName, [{Key,RemoteId,format_value(Value, JobName)}|Acc]). + +% format remote value into a special KV pair with seperated contact- and meta-maps +% by projecting out syncable attributes(content / meta) +-spec remote_kvp(remVal(), jobName()) -> remKVP(). +remote_kvp(#{<<"id">>:=Id} = Value, Name) when is_map(Value) -> + {Id, {maps:with(?CONTENT_ATTRIBUTES(Name), Value), maps:with(?META_ATTRIBUTES(Name), Value)}}. + +% format local value into a local KV pair +% by projecting out syncable attributes(content / meta) +-spec local_kvp(remKey(), locVal(), jobName()) -> locKVP(). +local_kvp(Id, Value, Name) when is_map(Value) -> + {Id, {maps:with(?CONTENT_ATTRIBUTES(Name), Value), maps:with(?META_ATTRIBUTES(Name), Value)}}. -spec connect_check_src(#state{}) -> {ok,#state{}} | {error,any()} | {error,any(), #state{}}. 
connect_check_src(#state{isConnected=true} = State) -> {ok, State}; -connect_check_src(#state{isConnected=false, accountId=AccountId, tokenPrefix=TokenPrefix} = State) -> +connect_check_src(#state{isConnected=true, type=push} = State) -> + {ok, State#state{isConnected=true}}; +connect_check_src(#state{isConnected=false, type=pull, accountId=AccountId, tokenPrefix=TokenPrefix} = State) -> ?JTrace("Refreshing access token"), case dderl_oauth:refresh_access_token(AccountId, TokenPrefix, ?SYNC_OFFICE365) of - {ok, Token} -> - ?Info("new access token fetched"), + {ok, Token} -> %?Info("new access token fetched"), {ok, State#state{token=Token, isConnected=true}}; {error, Error} -> - ?JError("Unexpected response : ~p", [Error]), + ?JError("Unexpected response refreshing access token: ~p", [Error]), {error, Error, State} end. -spec get_source_events(#state{}, scrBatchSize()) -> - {ok,remKeys(),#state{}} | {ok,sync_complete,#state{}}. % {error,scrAnyKey()} -get_source_events(#state{auditStartTime=LastStartTime, type=push, - channel=Channel, isFirstSync=IsFirstSync} = State, BulkSize) -> - case dperl_dal:read_audit_keys(Channel, LastStartTime, BulkSize) of - {LastStartTime, LastStartTime, []} -> - if - IsFirstSync -> - ?JInfo("Audit rollup is complete"), - {ok, sync_complete, State#state{isFirstSync=false}}; - true -> - {ok, sync_complete, State} - end; - {_StartTime, NextStartTime, []} -> - {ok, [], State#state{auditStartTime=NextStartTime}}; - {_StartTime, NextStartTime, Keys} -> - UniqueKeys = lists:delete(undefined, lists:usort(Keys)), - {ok, UniqueKeys, State#state{auditStartTime=NextStartTime}} - end; -get_source_events(#state{dirtyKeys=[]} = State, _BulkSize) -> + {ok, remKVPs(), #state{}} | {ok, sync_complete, #state{}}. 
% {error,scrAnyKey()} +% get_source_events(#state{auditStartTime=LastStartTime, type=push, +% channel=Channel, isFirstSync=IsFirstSync} = State, BulkSize) -> +% case dperl_dal:read_audit_keys(Channel, LastStartTime, BulkSize) of +% {LastStartTime, LastStartTime, []} -> +% if +% IsFirstSync -> +% ?JInfo("Audit rollup is complete"), +% {ok, sync_complete, State#state{isFirstSync=false}}; +% true -> +% {ok, sync_complete, State} +% end; +% {_StartTime, NextStartTime, []} -> +% {ok, [], State#state{auditStartTime=NextStartTime}}; +% {_StartTime, NextStartTime, Keys} -> +% UniqueKeys = lists:delete(undefined, lists:usort(Keys)), +% {ok, UniqueKeys, State#state{auditStartTime=NextStartTime}} +% end; +get_source_events(#state{cycleBuffer=[]} = State, _BulkSize) -> {ok, sync_complete, State}; -get_source_events(#state{dirtyKeys=DirtyKeys} = State, _BulkSize) -> - ?Info("get_source_events result count ~p~n~p",[length(DirtyKeys), hd(DirtyKeys)]), - {ok, DirtyKeys, State#state{dirtyKeys=[]}}. +get_source_events(#state{cycleBuffer=CycleBuffer} = State, _BulkSize) -> + ?Info("get_source_events result count ~p~n~p",[length(CycleBuffer), hd(CycleBuffer)]), + {ok, CycleBuffer, State#state{cycleBuffer=[]}}. -- spec connect_check_dst(#state{}) -> {ok, #state{}}. % {error,any()} | {error,any(),#state{}} -connect_check_dst(State) -> {ok, State}. % Question: Why defaulted for push destination? +- spec connect_check_dst(#state{}) -> {ok, #state{}}. % | {error, term()} | {error, term(), #state{}} +connect_check_dst(State) -> {ok, State}. % ToDo: Maybe implement for push destination? do_refresh(_State, _BulkSize) -> {error, cleanup_only}. % using cleanup/refresh combined --spec fetch_src(remKey(), #state{}) -> ?NOT_FOUND | locVal() | remVal(). 
-fetch_src(Key, #state{channel=Channel, type=push}) -> - dperl_dal:read_channel(Channel, Key); -fetch_src(Key, #state{remItems=RemItems, type=pull}) -> - case lists:keyfind(Key, 1, RemItems) of - {Key, _RemoteId, Value} -> Value; - false -> ?NOT_FOUND +-spec fetch_src(remKey(), #state{}) -> ?NOT_FOUND | {remVal(), remMeta()}. +% fetch_src(Key, #state{channel=Channel, type=push}) -> +% dperl_dal:read_channel(Channel, Key); +fetch_src(Id, #state{name=Name, type=pull, apiUrl=ApiUrl, token=Token} = State) -> + ContactUrl = ApiUrl ++ binary_to_list(Id), + ?JTrace("Fetching contact with url : ~s", [ContactUrl]), + case exec_req(ContactUrl, Token) of + {error, unauthorized} -> reconnect_exec(State, fetch_src, [Id]); + {error, Error} -> {error, Error, State}; + #{<<"id">> := _} = RemVal -> remote_kvp(RemVal, Name); + _ -> ?NOT_FOUND end. --spec fetch_dst(remKey(), #state{}) -> ?NOT_FOUND | locVal() | remVal(). -fetch_dst(Key, #state{ name=Name, remItems=RemItems, type=push - , apiUrl=ApiUrl, token=Token} = State) -> - case lists:keyfind(Key, 1, RemItems) of - {Key, RemoteId, _Value} -> - ContactUrl = erlang:iolist_to_binary([ApiUrl, RemoteId]), - case exec_req(ContactUrl, Token) of - #{<<"id">> := _} = RValue -> format_value(RValue, Name); - {error, unauthorized} -> reconnect_exec(State, fetch_dst, [Key]); - {error, Error} -> {error, Error}; - _ -> ?NOT_FOUND - end; - false -> - ?NOT_FOUND - end; -fetch_dst(Key, #state{channel=Channel}) -> - dperl_dal:read_channel(Channel, Key). - --spec insert_dst(remKey(), remVal()|locVal(), #state{}) -> {error, any()}. 
-insert_dst(Key, Value, #state{type=push, apiUrl=ApiUrl, token=Token} = State) -> - case exec_req(ApiUrl, Token, Value, post) of - #{<<"id">> := _} = RemoteValue -> merge_meta_to_local(Key, RemoteValue, State); - {error, unauthorized} -> reconnect_exec(State, insert_dst, [Key, Value]); - {error, Error} -> {error, Error} - end; -insert_dst(Key, Value, State) -> - Result = update_dst(Key, Value, State), - ?Info("insert_dst ~p~n~p~nresult ~p",[Key, Value, Result]), - Result. - -merge_meta_to_local(Key, RemoteValue, #state{channel=Channel, tokenPrefix=TokenPrefix} = State) -> - AccessId = access_id(TokenPrefix), - MetaItem = #{<<"id">> => maps:get(<<"id">>, RemoteValue)}, - case dperl_dal:read_channel(Channel, Key) of - #{<<"META">> := Meta} = LocVal -> - case maps:merge(Meta, #{AccessId => MetaItem}) of - Meta -> - ok; % RemoteMeta already there - NewM -> - MergedBin = imem_json:encode(LocVal#{<<"META">> => NewM}), - dperl_dal:write_channel(Channel, Key, MergedBin) - end; - LocVal -> - MergedBin = imem_json:encode(LocVal#{<<"META">> => MetaItem}), - dperl_dal:write_channel(Channel, Key, MergedBin) - end, - {false, State}. - -access_id(TokenPrefix) -> - list_to_binary(string:join(TokenPrefix,"/")). +-spec fetch_dst(remKVP() | locKVP(), #state{}) -> ?NOT_FOUND | {locVal(), locMeta()} | {remVal(), remMeta()}. 
+% fetch_dst(Key, #state{ name=Name, remItems=RemItems, type=push +% , apiUrl=ApiUrl, token=Token} = State) -> +% case lists:keyfind(Key, 1, RemItems) of +% {Key, RemoteId, _Value} -> +% ContactUrl = erlang:iolist_to_binary([ApiUrl, RemoteId]), +% case exec_req(ContactUrl, Token) of +% #{<<"id">> := _} = RValue -> format_value(RValue, Name); +% {error, unauthorized} -> reconnect_exec(State, fetch_dst, [Key]); +% {error, Error} -> {error, Error}; +% _ -> ?NOT_FOUND +% end; +% false -> +% ?NOT_FOUND +% end; +fetch_dst(Id, #state{name=Name, channel=Channel, keyPrefix=KeyPrefix, type=pull}) -> + Key = get_local_key(Id, Name, Channel, KeyPrefix), + case dperl_dal:read_channel(Channel, Key) of + ?NOT_FOUND -> ?NOT_FOUND; + Value -> local_kvp(Id, Value, Name) + end. --spec delete_dst(remKey(), #state{}) -> {scrSoftError(), #state{}}. -delete_dst(Key, #state{channel=Channel, type=push, remItems=RemItems, - apiUrl=ApiUrl, token=Token} = State) -> - case lists:keyfind(Key, 1, RemItems) of - {Key, RemoteId, _Value} -> - ContactUrl = erlang:iolist_to_binary([ApiUrl, RemoteId]), - case exec_req(ContactUrl, Token, #{}, delete) of +-spec insert_dst(remKey(), remKVP()|locKVP(), #state{}) -> {scrSoftError(), #state{}}. 
+% insert_dst(Key, Value, #state{type=push, apiUrl=ApiUrl, token=Token} = State) -> +% case exec_req(ApiUrl, Token, Value, post) of +% #{<<"id">> := _} = RemoteValue -> merge_meta_to_local(Key, RemoteValue, State); +% {error, unauthorized} -> reconnect_exec(State, insert_dst, [Key, Value]); +% {error, Error} -> {error, Error} +% end; +insert_dst(Id, {Id, {Value,Meta}}, #state{ name=Name, channel=Channel, type=pull + , keyPrefix=KeyPrefix, template=Template} = State) -> + Key = KeyPrefix ++ [new_local_id(Id)], + ?Info("insert_dst ~p",[Key]), + MergedValue = maps:merge(maps:merge(Template, Value), #{<<"META">> => #{Name=>Meta}}), + MergedBin = imem_json:encode(MergedValue), + case dperl_dal:write_channel(Channel, Key, MergedBin) of + ok -> + {false, State}; + {error, Error} -> + ?Error("insert_dst ~p~n~p~nresult ~p",[Key, MergedBin, {error, Error}]), + {true, State} + end. + +-spec update_dst(remKey(), remKVP()|locKVP(), #state{}) -> {scrSoftError(), #state{}}. +% update_dst(Key, Value, #state{name=Name, channel=Channel, type=push, +% remItems=RemItems, apiUrl=ApiUrl, token=Token} = State) -> +% case lists:keyfind(Key, 1, RemItems) of +% {Key, RemoteId, _Value} -> +% ContactUrl = erlang:iolist_to_binary([ApiUrl, RemoteId]), +% case exec_req(ContactUrl, Token, Value, patch) of +% #{<<"id">> := _} = RemoteValue -> +% FormRemote = format_value(RemoteValue, Name), +% OldValue = dperl_dal:read_channel(Channel, Key), +% MergeValue = maps:merge(OldValue, FormRemote), +% MergedBin = imem_json:encode(MergeValue), +% dperl_dal:write_channel(Channel, Key, MergedBin), +% {false, State}; +% {error, unauthorized} -> +% reconnect_exec(State, update_dst, [Key, Value]); +% {error, Error} -> +% {error, Error} +% end; +% false -> +% {false, State} +% end; +update_dst(Id, {Id, {Value,Meta}}, #state{ name=Name, channel=Channel, keyPrefix=KeyPrefix + , type=pull, template=Template} = State) -> + Key = get_local_key(Id, Name, Channel, KeyPrefix), + ?Info("update_dst ~p",[Key]), + case 
dperl_dal:read_channel(Channel, Key) of + ?NOT_FOUND -> + ?JError("update_dst key ~p not found for remote id ~p", [Key, Id]), + {true, State}; + #{<<"META">> := OldMeta} = OldVal -> + NewMeta = maps:merge(OldMeta, #{Name => Meta}), + AllVal = maps:merge(Template, OldVal), + NewVal = maps:merge(AllVal, Value), + case update_local(Channel, Key, OldVal, NewVal, NewMeta) of ok -> - dperl_dal:remove_from_channel(Channel, Key), {false, State}; - {error, unauthorized} -> - reconnect_exec(State, delete_dst, [Key]); - Error -> - Error - end; - false -> - {false, State} - end; -delete_dst(Key, #state{channel=Channel} = State) -> + {error, _Error} -> + ?JError("update_dst cannot update key ~p to ~p", [Key, NewVal]), + {true, State} + end + end. + +%% update a local contact record to new value and new metadata. +%% create an audit log only if the value changes, not for a pure meta update. +%% this should avoid endless provisioning loops for metadata changes only. +-spec update_local(scrChannel(), locKey(), remVal(), locVal(), remMeta()) -> + {scrSoftError(), #state{}}. +update_local(Channel, Key, OldVal, OldVal, NewMeta) -> + MergedBin = imem_json:encode(OldVal#{<<"META">> => NewMeta}), + dperl_dal:write_channel_no_audit(Channel, Key, MergedBin); +update_local(Channel, Key, _OldVal, NewVal, NewMeta) -> + MergedBin = imem_json:encode(NewVal#{<<"META">> => NewMeta}), + dperl_dal:write_channel(Channel, Key, MergedBin). + +-spec delete_dst(remKey(), #state{}) -> {scrSoftError(), #state{}}. 
+% delete_dst(Key, #state{channel=Channel, type=push, remItems=RemItems, +% apiUrl=ApiUrl, token=Token} = State) -> +% case lists:keyfind(Key, 1, RemItems) of +% {Key, RemoteId, _Value} -> +% ContactUrl = erlang:iolist_to_binary([ApiUrl, RemoteId]), +% case exec_req(ContactUrl, Token, #{}, delete) of +% ok -> +% dperl_dal:remove_from_channel(Channel, Key), +% {false, State}; +% {error, unauthorized} -> +% reconnect_exec(State, delete_dst, [Key]); +% Error -> +% Error +% end; +% false -> +% {false, State} +% end; +delete_dst(Id, #state{ name=Name, channel=Channel, keyPrefix=KeyPrefix + , type=pull, template=Template} = State) -> + Key = get_local_key(Id, Name, Channel, KeyPrefix), ?Info("delete_dst ~p",[Key]), - dperl_dal:remove_from_channel(Channel, Key), - {false, State}. - --spec update_dst(remKey(), remVal()|locVal(), #state{}) -> {scrSoftError(), #state{}}. -update_dst(Key, Value, #state{name=Name, channel=Channel, type=push, - remItems=RemItems, apiUrl=ApiUrl, token=Token} = State) -> - case lists:keyfind(Key, 1, RemItems) of - {Key, RemoteId, _Value} -> - ContactUrl = erlang:iolist_to_binary([ApiUrl, RemoteId]), - case exec_req(ContactUrl, Token, Value, patch) of - #{<<"id">> := _} = RemoteValue -> - FormRemote = format_value(RemoteValue, Name), - OldValue = dperl_dal:read_channel(Channel, Key), - MergeValue = maps:merge(OldValue, FormRemote), - MergedBin = imem_json:encode(MergeValue), - dperl_dal:write_channel(Channel, Key, MergedBin), + case fetch_dst(Id, State) of + ?NOT_FOUND -> + {true, state}; + {Id, {Value, Meta}} -> + case maps:without(Name, Meta) of + #{} -> % no other syncs remaining for this key + dperl_dal:remove_from_channel(Channel, Key), {false, State}; - {error, unauthorized} -> - reconnect_exec(State, update_dst, [Key, Value]); - {error, Error} -> - {error, Error} - end; - false -> - {false, State} - end; -update_dst(Key, Value, #state{channel=Channel} = State) when is_map(Value) -> - OldValue = dperl_dal:read_channel(Channel, Key), - 
MergeValue = maps:merge(OldValue, Value), - MergedBin = imem_json:encode(MergeValue), - dperl_dal:write_channel(Channel, Key, MergedBin), - {false, State}. + NewMeta -> % other syncs remaining for this key + NewVal = maps:merge(Template, Value), + case update_local(Channel, Key, Value, NewVal, NewMeta) of + ok -> + {false, State}; + {error, _Error} -> + ?JError("delete_dst cannot delete key ~p", [Key]), + {true, State} + end + end + end. report_status(_Key, _Status, _State) -> no_op. -load_dst_after_key(CurKey, BlkCount, #state{type=pull, keyPrefix=KeyPrefix} = State) when CurKey < KeyPrefix -> - load_dst_after_key(KeyPrefix, BlkCount, State); -load_dst_after_key(CurKey, BlkCount, #state{channel=Channel, type=pull, keyPrefix=KeyPrefix}) -> - Filter = fun({K,_}) -> lists:prefix(KeyPrefix,K) end, - lists:filter(Filter, dperl_dal:read_gt(Channel, CurKey, BlkCount)). +-spec load_dst_after_key(remKVP() | locKVP(), scrBatchSize(), #state{}) -> {ok, locKVPs(), #state{}} | {error, term(), #state{}}. +load_dst_after_key({Id,{_,_}}, BlkCount, #state{name=Name, channel=Channel, type=pull, keyPrefix=KeyPrefix}) -> + {ok, read_local_kvps_after_id(Channel, Name, KeyPrefix, Id, BlkCount, [])}; +load_dst_after_key(_Key, BlkCount, #state{name=Name, channel=Channel, type=pull, keyPrefix=KeyPrefix} = State) -> + ?Info("load_dst_after_key for non-matching (initial) key ~p", [_Key]), + case read_local_kvps_after_id(Channel, Name, KeyPrefix, ?COMPARE_MIN_KEY, BlkCount, []) of + L when is_list(L) -> {ok, L, State}; + {error, Reason} -> {error, Reason, State} + end. -load_src_after_key(CurKey, BlkCount, #state{type=pull, fetchUrl=undefined, apiUrl=ApiUrl} = State) -> +%% starting after Id, run through remoteId index and collect a block of locKVP() data belonging to +%% this job name and keyPrefix +-spec read_local_kvps_after_id(scrChannel(), jobName(), locKey(), remKey(), scrBatchSize(), locKVPs()) -> locKVPs() | {error, term()}. 
+read_local_kvps_after_id(_Channel, _Name, _KeyPrefix, _Id, _BlkCount, _Acc) -> + % lists:prefix(KeyPrefix,K) + []. + +-spec load_src_after_key(remKVP()| locKVP(), scrBatchSize(), #state{}) -> + {ok, remKVPs(), #state{}} | {error, term(), #state{}}. +load_src_after_key(_CurKVP, _BlkCount, #state{type=pull, fetchUrl=finished} = State) -> + {ok, [], State}; +load_src_after_key(CurKVP, BlkCount, #state{type=pull, fetchUrl=undefined, apiUrl=ApiUrl} = State) -> UrlParams = dperl_dal:url_enc_params(#{"$top" => integer_to_list(BlkCount)}), - ContactsUrl = erlang:iolist_to_binary([ApiUrl, "?", UrlParams]), - load_src_after_key(CurKey, BlkCount, State#state{fetchUrl=ContactsUrl}); -load_src_after_key(CurKey, BlkCount, #state{name=Name, type=pull, isCleanupFinished=true, - keyPrefix=KeyPrefix, token=Token, fetchUrl=FetchUrl} = State) -> - case fetch_all_contacts(FetchUrl, Token, KeyPrefix, Name) of - {ok, Contacts} -> - load_src_after_key(CurKey, BlkCount, State#state{remItems=Contacts, isCleanupFinished=false}); - {error, unauthorized} -> - reconnect_exec(State, load_src_after_key, [CurKey, BlkCount]); - {error, Error} -> - {error, Error, State} - end; -load_src_after_key(CurKey, BlkCount, #state{type=pull, remItems=Contacts} = State) -> - {ok, get_contacts_gt(CurKey, BlkCount, Contacts), State}. 
+ ContactsUrl = lists:flatten([ApiUrl, "?", UrlParams]), + load_src_after_key(CurKVP, BlkCount, State#state{fetchUrl=ContactsUrl}); +load_src_after_key(CurKVP, BlkCount, #state{ name=Name, type=pull, token=Token + , fetchUrl=FetchUrl} = State) -> + ?JTrace("Fetching contacts with url : ~s", [FetchUrl]), + case exec_req(FetchUrl, Token) of + {error, unauthorized} -> reconnect_exec(State, load_src_after_key, [CurKVP, BlkCount]); + {error, Error} -> {error, Error, State}; + #{<<"@odata.nextLink">> := NextUrl, <<"value">> := RemVals} -> + KVPs = [remote_kvp(RemVal, Name) || RemVal <- RemVals], + ?JTrace("Fetched contacts : ~p", [length(KVPs)]), + %?Info("First fetched contact : ~p", [element(1,hd(KVPs))]), + {ok, KVPs, State#state{fetchUrl=NextUrl}}; + #{<<"value">> := RemVals} -> % may be an empty list + KVPs = [remote_kvp(RemVal, Name) || RemVal <- RemVals], + ?JTrace("Last fetched contacts : ~p", [length(KVPs)]), + {ok, KVPs, State#state{fetchUrl=finished}} + end. +-spec reconnect_exec(#state{}, fun(), list()) -> + {scrSoftError(), #state{}} | {ok, remKVPs(), #state{}} | {error, term(), #state{}}. reconnect_exec(State, Fun, Args) -> - case connect_check_src(State#state{isConnected = false}) of - {ok, State1} -> - erlang:apply(?MODULE, Fun, Args ++ [State1]); - {error, Error, State1} -> - {error, Error, State1} + case connect_check_src(State#state{isConnected=false}) of + {ok, State1} -> erlang:apply(?MODULE, Fun, Args ++ [State1]); + {error, Error, State1} -> {error, Error, State1} end. --spec do_cleanup(remKeys(), remKeys(), remKeys(), boolean(), #state{}) -> {ok, #state{}}. 
-do_cleanup(_Deletes, _Inserts, _Diffs, _IsFinished, #state{type = push}) -> - {error, <<"cleanup only for pull job">>}; -do_cleanup(Deletes, Inserts, Diffs, IsFinished, State) -> - NewState = State#state{dirtyKeys=Inserts++Diffs++Deletes}, - if IsFinished -> {ok, finish, NewState#state{isCleanupFinished=true}}; - true -> {ok, NewState} +% execute cleanup/refresh for found differences (Deletes, Inserts and value Diffs) +-spec do_cleanup(remKeys(), remKeys(), remKeys(), boolean(), #state{}) -> + {ok, #state{}} | {ok, finish, #state{}} . +do_cleanup(Deletes, Inserts, Diffs, IsFinished, #state{type=pull} = State) -> + NewState = State#state{cycleBuffer=Deletes++Diffs++Inserts}, + if + IsFinished -> + %% deposit cleanup batch dirty results in state for sync to pick up + %% confirm finished cleanup cycle (last Diffs to sync in cycle) + %% re-arm cleanup fetching to restart with top rows in id order + {ok, finish, NewState#state{fetchUrl=undefined}}; + true -> + %% deposit cleanup batch dirty results in state for sync to pick up + {ok, NewState} end. get_status(#state{}) -> #{}. 
@@ -409,12 +513,16 @@ init({#dperlJob{ name=Name, srcArgs=#{apiUrl:=ApiUrl}, args=Args , vnf = <<"fun imem_index:vnf_identity/1.">> , iff = ContactIff}, dperl_dal:create_check_index(ChannelBin, [IdxContact]), + Template = case dperl_dal:read_channel(Channel, KeyPrefix) of + ?NOT_FOUND -> #{}; + T when is_map(T) -> T + end, case dderl_oauth:get_token_info(AccountId, TokenPrefix, ?SYNC_OFFICE365) of #{<<"access_token">>:=Token} -> {ok, State#state{ name=Name, type=Type, channel=ChannelBin, keyPrefix=KeyPrefix , apiUrl=ApiUrl, tokenPrefix=TokenPrefix - , token=Token, accountId = AccountId - , template=dperl_dal:read_channel(Channel, KeyPrefix)}}; + , token=Token, accountId=AccountId + , template=Template}}; _ -> ?JError("Access token not found for ~p at ~p", [AccountId, TokenPrefix]), {stop, badarg} @@ -442,39 +550,9 @@ terminate(Reason, _State) -> %% private functions -%% Fetch all remote contacts, create 3-tuple {Key::list(), RemoteId::binary(), RemoteValue::map()) -%% Sort by Key (needed for sync) -fetch_all_contacts(Url, Token, KeyPrefix, JobName) -> - fetch_all_contacts(Url, Token, KeyPrefix, JobName, []). - -fetch_all_contacts(Url, Token, KeyPrefix, JobName, AccContacts) -> - ?JTrace("Fetching contacts with url : ~s", [Url]), - ?JTrace("Fetched contacts : ~p", [length(AccContacts)]), - case exec_req(Url, Token) of - #{<<"@odata.nextLink">> := NextUrl, <<"value">> := MoreContacts} -> - Contacts = format_remote_values_to_kv(MoreContacts, KeyPrefix, JobName), - fetch_all_contacts(NextUrl, Token, KeyPrefix, lists:append(Contacts, AccContacts)); - #{<<"value">> := MoreContacts} -> - Contacts = format_remote_values_to_kv(MoreContacts, KeyPrefix, JobName), - {ok, lists:keysort(1, lists:append(Contacts, AccContacts))}; - {error, Error} -> - {error, Error} - end. - -get_contacts_gt(CurKey, BlkCount, Contacts) -> - get_contacts_gt(CurKey, BlkCount, Contacts, []). 
- -get_contacts_gt(_CurKey, _BlkCount, [], Acc) -> lists:reverse(Acc); -get_contacts_gt(_CurKey, BlkCount, _Contacts, Acc) when length(Acc) == BlkCount -> - lists:reverse(Acc); -get_contacts_gt(CurKey, BlkCount, [{Key, _} | Contacts], Acc) when Key =< CurKey -> - get_contacts_gt(CurKey, BlkCount, Contacts, Acc); -get_contacts_gt(CurKey, BlkCount, [Contact | Contacts], Acc) -> - get_contacts_gt(CurKey, BlkCount, Contacts, [Contact | Acc]). - --spec exec_req(Url::binary()|string(), Token::binary()) -> tuple(). -exec_req(Url, Token) when is_binary(Url) -> - exec_req(binary_to_list(Url), Token); +%% get a json object from the cloud service and convert it to a map +%% use OAuth2 header with token +-spec exec_req(string(), binary()) -> map() | {error, term()}. exec_req(Url, Token) -> AuthHeader = [{"Authorization", "Bearer " ++ binary_to_list(Token)}], case httpc:request(get, {Url, AuthHeader}, [], []) of @@ -482,17 +560,21 @@ exec_req(Url, Token) -> imem_json:decode(list_to_binary(Result), [return_maps]); {ok, {{_, 401, _}, _, _}} -> {error, unauthorized}; - Error -> - {error, Error} + {error, Reason} -> + ?Info("exec_req get ~p returns error ~p",[Url,Reason]), + {error, Reason} end. --spec exec_req(Url::binary()|string(), Token::binary(), Body::map(), Method::atom()) -> tuple(). -exec_req(Url, Token, Body, Method) when is_binary(Url), is_map(Body) -> - exec_req(binary_to_list(Url), Token, Body, Method); +%% emit a cloud service request and convert json result into a map. +%% The request body is cast from a map. +%% use OAuth2 header with token +-spec exec_req(string(), binary(), map(), atom()) -> ok | map() | {error, term()}. 
exec_req(Url, Token, Body, Method) -> AuthHeader = [{"Authorization", "Bearer " ++ binary_to_list(Token)}], - % Headers = [AuthHeader, {"Contnet-type", "application/json"}], - case httpc:request(Method, {Url, AuthHeader, "application/json", imem_json:encode(Body)}, [], []) of + case httpc:request(Method, + {Url, AuthHeader, "application/json", imem_json:encode(Body)}, + [], + []) of {ok, {{_, 201, _}, _, Result}} -> % create/post result imem_json:decode(list_to_binary(Result), [return_maps]); diff --git a/src/dperl/jobs/dpjob_ouraring_crawl.erl b/src/dperl/jobs/dpjob_ouraring_crawl.erl index 5d8f7233..a83dc02d 100644 --- a/src/dperl/jobs/dpjob_ouraring_crawl.erl +++ b/src/dperl/jobs/dpjob_ouraring_crawl.erl @@ -1,6 +1,6 @@ -module(dpjob_ouraring_crawl). -%% implements scr puller callback for OuraRing cloud data as a cleanupCumRefresh job (c/r) +%% implements scr puller callback for OuraRing cloud data as a cleanup/refresh job (c/r) %% avoids pulling incomplete intra-day data by not pulling beyond yesterday's data %% the default "OuraRing" identifier can be altered to camouflage the nature of the data %% this also supports multiple OuraRing pullers into the same table, if needed @@ -27,7 +27,7 @@ , lastReadinessDay := maybeDate()}. % relevant cleanup status -type token() :: binary(). % OAuth access token (refreshed after 'unauthorized') --define(METRICS, ["userinfo", "activity", "readiness", "sleep"]). +-define(METRICS, ["userinfo", "readiness", "sleep", "activity"]). -define(OAUTH2_CONFIG(__JOB_NAME), ?GET_CONFIG(oAuth2Config,[__JOB_NAME], @@ -161,16 +161,19 @@ insert_dst(Key, Val, State) -> report_status(_Key, _Status, _State) -> no_op. +% execute simple cleanup for next batch of KVPs -spec do_cleanup(#state{}, scrBatchSize()) -> {ok, #state{}} | {ok, finish, #state{}} | {error, term(), #state{}}. 
-do_cleanup(#state{cycleBuffer=CycleBuffer} = State, _BlkCount) -> +do_cleanup(State, _BlkCount) -> case fetch_metrics(?METRICS, State) of - {ok, State2} -> + {ok, #state{cycleBuffer=CycleBuffer} = State1} -> case CycleBuffer of [_] -> % only userinfo remains. we are done - {ok, finish, State2}; + %?Info("do_cleanup is finished with ~p", [CycleBuffer]), + {ok, finish, State1}; _ -> % other items in the list, continue - {ok, State2} + %?Info("do_cleanup continues with ~p", [CycleBuffer]), + {ok, State1} end; {error, Error} -> {error, Error, State#state{isConnected=false}} @@ -324,11 +327,17 @@ fetch_metric(Metric, DayQuery, ApiUrl, Token) -> Url = ApiUrl ++ Metric ++ DayQuery, MetricBin = list_to_binary(Metric), case exec_req(Url, Token) of - #{MetricBin:=[]} -> none; - Value when is_map(Value) -> {ok, Value}; - {error, Error} -> {error, Error} + #{MetricBin:=[]} = _R -> + %?Info("fetch_metric ~p ~p result none ~p",[Metric, DayQuery, _R]), + none; + Value when is_map(Value) -> + %?Info("fetch_metric ~p ~p result ok",[Metric, DayQuery]), + {ok, Value}; + {error, Error} -> + %?Info("fetch_metric ~p ~p result error ~p",[Metric, DayQuery, Error]), + {error, Error} end. - + %% fetch userinfo from Oura cloud and add it to the CycleBuffer %% userinfo comes as latest value only, no day history available -spec fetch_userinfo(#state{}) ->{ok, #state{}} | {error, term()}. 
@@ -336,6 +345,7 @@ fetch_userinfo(#state{keyPrefix=KeyPrefix, apiUrl=ApiUrl, token=Token, cycleBuff case exec_req(ApiUrl ++ "userinfo", Token) of UserInfo when is_map(UserInfo) -> KVP = {build_key(KeyPrefix, "userinfo"), UserInfo}, + %?Info("fetch_userinfo adds ~p to ~p",[KVP,CycleBuffer]), {ok, State#state{cycleBuffer=[KVP|CycleBuffer]}}; {error, Error} -> ?JError("Error fetching userinfo : ~p", [Error]), From ce16b94832ed66740d54498b936c325c6f5c99a8 Mon Sep 17 00:00:00 2001 From: stoch Date: Tue, 30 Jun 2020 19:17:56 +0200 Subject: [PATCH 64/72] improvements on dpjob_office_365 WIP --- src/dderl_oauth.erl | 1 + src/dperl/dperl_dal.erl | 22 +++ src/dperl/dperl_strategy_scr.erl | 212 +++++++++++++++++----------- src/dperl/jobs/dpjob_office_365.erl | 44 ++++-- 4 files changed, 191 insertions(+), 88 deletions(-) diff --git a/src/dderl_oauth.erl b/src/dderl_oauth.erl index 438ac01b..3477d2c6 100644 --- a/src/dderl_oauth.erl +++ b/src/dderl_oauth.erl @@ -18,6 +18,7 @@ set_token_info(AccountId, TokenPrefix, TokenInfo, SyncType) when is_map(TokenInf set_token_info(AccountId, TokenPrefix, TokenInfo, SyncType) when is_list(TokenInfo) -> set_token_info(AccountId, TokenPrefix, list_to_binary(TokenInfo), SyncType); set_token_info(AccountId, TokenPrefix, TokenInfo, _SyncType) when is_binary(TokenInfo) -> + ?Info("set_token_info using ~p",[imem_enc_mnesia:get_enc_hash()]), dderl_dal:write_to_avatar_channel(AccountId, TokenPrefix ++ [?TOKEN_KEYPART], TokenInfo). get_authorize_url(XSRFToken, AuthConfig, SyncType) -> diff --git a/src/dperl/dperl_dal.erl b/src/dperl/dperl_dal.erl index 383c6dda..8bc01da8 100644 --- a/src/dperl/dperl_dal.erl +++ b/src/dperl/dperl_dal.erl @@ -72,6 +72,7 @@ ,read_channel_index/2 ,read_channel_index_key/2 ,read_channel_index_key_prefix/3 + ,read_channel_index_key_prefix_gt/4 ]). 
check_table(Table, ColumnNames, ColumnTypes, DefaultRecord, Opts) -> @@ -154,6 +155,27 @@ read_channel_index_key_prefix(Channel, Stu, Prefix) -> F = fun(X) -> lists:prefix(Prefix,X) end, lists:filter(F, read_channel_index_key(Channel, Stu)). +read_channel_index_key_prefix_gt(Channel, Stu, KeyPrefix, BlkCount) -> + read_channel_index_key_prefix_gt(imem_meta:index_table(Channel), Stu, KeyPrefix, BlkCount, []). + +read_channel_index_key_prefix_gt(_IndexTable, _Stu, _KeyPrefix, 0, Acc) -> lists:reverse(Acc); +read_channel_index_key_prefix_gt(IndexTable, Stu, KeyPrefix, More, Acc) -> + case imem_meta:dirty_next(IndexTable, Stu) of + '$end_of_table' -> + lists:reverse(Acc); + Next -> + [Row] = imem_meta:read(IndexTable, Next), + Key = sext:decode(Row#ddIndex.lnk), + case lists:prefix(KeyPrefix, Key) of + true -> + read_channel_index_key_prefix_gt( IndexTable, Next, KeyPrefix, + More-1, [{element(2,Next),Key}|Acc]); + false -> + read_channel_index_key_prefix_gt( IndexTable, Next, KeyPrefix, + More, Acc) + end + end. + read_channel_raw(Channel, Key) when is_list(Channel) -> read_channel_raw(list_to_binary(Channel), Key); read_channel_raw(Channel, Key) when is_binary(Channel) -> diff --git a/src/dperl/dperl_strategy_scr.erl b/src/dperl/dperl_strategy_scr.erl index b5c303ea..47e08614 100644 --- a/src/dperl/dperl_strategy_scr.erl +++ b/src/dperl/dperl_strategy_scr.erl @@ -42,14 +42,22 @@ % fetch one item from source (if it exists) % return scrAnyVal() for cleanup % return scrAnyKeyVal() for c/r +% the error pattern decides on the logging details -callback fetch_src(scrAnyKey() | scrAnyKeyVal() , scrState()) -> - ?NOT_FOUND | scrAnyVal() | scrAnyKeyVal(). + ?NOT_FOUND | scrAnyVal() | scrAnyKeyVal() | + {error, term(), scrState()} | + {error, term()} | + error. 
% fetch one item from destination (if it exists) % return scrAnyVal() for cleanup % return scrAnyKeyVal() for c/r +% the error pattern decides on the logging details -callback fetch_dst(scrAnyKey() | scrAnyKeyVal(), scrState()) -> - ?NOT_FOUND | scrAnyVal() | scrAnyKeyVal(). + ?NOT_FOUND | scrAnyVal() | scrAnyKeyVal() | + {error, term(), scrState()} | + {error, term()} | + error. % delete one item from destination (if it exists) % scrSoftError() = true signals that a soft error happened which is skipped without throwing @@ -99,7 +107,11 @@ scrState()) -> true | false. -% override for value compare function +% Compare function, used to loosen compare semantics, ignoring some differences if needed. +% Comparisons will depend on the (FROZEN) callback state just before processing doing batch +% comparison (sync / cleanup / refresh). This must be taken into account in the optional +% override is_equal/4 in the callback module. +% is_equal() is not called and assumed to be true if Erlang compares values/KVPs equal -callback is_equal(scrAnyKey() | scrAnyKeyVal(), scrAnyVal(), scrAnyVal(), scrState()) -> true | false. @@ -154,17 +166,20 @@ {error, term()}. % bulk load one batch of keys (for cleanup) or kv-pairs (cleanup/refresh) from source. -% up to scrBatchSize() existing keys must be returned in key order. +% up to scrBatchSize() existing keys MUST be returned in key order. +% Postponing trailing keys to next round is not permitted. % A callback module implementing this and also the load_dst_after_key callback signals that it wants % to do override source loading for cleanup or c/r combined processing. % Returning less than scrBatchSize() items does not prevent further calls of this function. -% If called again in same cycle, {ok, [], scrState()} must be returned. +% In that case, if called again in same cycle, {ok, [], scrState()} must be returned. 
-callback load_src_after_key(LastSeen::scrAnyKey()|scrAnyKeyVal(), scrBatchSize(), scrState()) -> {ok, scrAnyKeys(), scrState()} | {ok, scrAnyKeyVals(), scrState()} | {error, term(), scrState()}. % bulk load one batch of kv-pairs for combined cleanup/refresh from destination. +% up to scrBatchSize() existing keys MUST be returned in key order. +% Postponing trailing keys to next round is not permitted. % A callback module implementing this and load_src_after_key signals that it wants % to do a cleanup/refresh combined processing. % Returning less than scrBatchSize() items does not prevent further calls of this function. @@ -388,7 +403,7 @@ execute(cleanup, Mod, Job, State, #{cleanup:=true} = Args) -> #{minKey:=MinKey, maxKey:=MaxKey, lastKey:=LastKey} = CleanupState, Ctx = #cleanup_ctx{ minKey=MinKey, maxKey=MaxKey, lastKey=LastKey , bulkCount=CleanupBulkCount}, - {RefreshCollectResult, State2} = cleanup_refresh_collect(Mod,Ctx,State1), + {RefreshCollectResult, State2} = cleanup_refresh_collect(Mod, Ctx, State1), Deletes = maps:get(deletes,RefreshCollectResult), Inserts = maps:get(inserts,RefreshCollectResult), Diffs = maps:get(differences,RefreshCollectResult), @@ -680,63 +695,73 @@ process_events(Keys, Mod, State) -> true -> Mod:should_sync_log(State); false -> true end, - process_events(Keys, Mod, State, ShouldLog, false). + IsEqualFun = make_is_equal_fun(Mod, State), + %% Note: Mod:is_equal() will see the current (FROZEN!) scrState() + process_events(Keys, Mod, State, ShouldLog, IsEqualFun, false). --spec process_events(scrAnyKeys(), jobModule(), scrState(), boolean(), boolean()) -> +-spec process_events(scrAnyKeys(), jobModule(), scrState(), boolean(), fun(), boolean()) -> {boolean(), scrState()}. 
-process_events([], Mod, State, _ShouldLog, IsError) -> +process_events([], Mod, State, _ShouldLog, _IsEqualFun, IsError) -> case erlang:function_exported(Mod, finalize_src_events, 1) of true -> execute_prov_fun(no_log, Mod, finalize_src_events, [State], false, IsError); false -> {IsError, State} end; -process_events([Key | Keys], Mod, State, ShouldLog, IsError) -> +process_events([Key | Keys], Mod, State, ShouldLog, IsEqualFun, IsError) -> + % Both values/KVPs are fetched again in order to avoid race conditions {NewIsError, NewState} = case {Mod:fetch_src(Key, State), Mod:fetch_dst(Key, State)} of - {S, S} -> %% nothing to do + {S, S} -> % exactly equal Erlang terms, nothing to do Mod:report_status(Key, no_op, State), {IsError, State}; - {{protected, _}, ?NOT_FOUND} -> % pusher protection + {{protected, _}, ?NOT_FOUND} -> % pusher protection ?JError("Protected ~p is not found on target", [Key]), Error = <<"Protected key is not found on target">>, Mod:report_status(Key, Error, State), dperl_dal:job_error(Key, <<"sync">>, <<"process_events">>, Error), {true, State}; - {{protected, S}, D} -> % pusher protection + {{protected, S}, D} -> % pusher protection execute_prov_fun("Protected", Mod, update_channel, [Key, true, S, D, State], ShouldLog, IsError, check); - {{protected, IsSamePlatform, S}, D} -> % puller protection + {{protected, IsSamePlatform, S}, D} -> % puller protection execute_prov_fun("Protected", Mod, update_channel, [Key, IsSamePlatform, S, D, State], ShouldLog, IsError, check); - {?NOT_FOUND, _D} -> execute_prov_fun("Deleted", Mod, delete_dst, [Key, State], ShouldLog, IsError); - {S, ?NOT_FOUND} -> execute_prov_fun("Inserted", Mod, insert_dst, [Key, S, State], ShouldLog, IsError); - {error, _} -> {true, State}; - {_, error} -> {true, State}; - {{error, _} = Error, _} -> + {?NOT_FOUND, _D} -> % orphan + execute_prov_fun("Deleted", Mod, delete_dst, [Key, State], ShouldLog, IsError); + {S, ?NOT_FOUND} -> % missing + execute_prov_fun("Inserted", Mod, 
insert_dst, [Key, S, State], ShouldLog, IsError); + {error, _} -> {true, State}; % src fetch error + {{error, _} = Error, _} -> % src fetch {error, term()} ?JError("Fetch src ~p : ~p", [Key, Error]), Mod:report_status(Key, Error, State), {true, State}; - {_, {error, _} = Error} -> - ?JError("Fetch dst ~p : ~p", [Key, Error]), - Mod:report_status(Key, Error, State), - {true, State}; - {{error, Error, State1}, _} -> + {{error, Error, State1}, _} -> % src fetch {error, term(), scrState()} ?JError("Fetch src ~p : ~p", [Key, Error]), Mod:report_status(Key, {error, Error}, State1), {true, State1}; - {_, {error, Error, State1}} -> + {_, error} -> {true, State}; % dst fetch error + {_, {error, _} = Error} -> % dst fetch {error, term()} + ?JError("Fetch dst ~p : ~p", [Key, Error]), + Mod:report_status(Key, Error, State), + {true, State}; + {_, {error, Error, State1}} -> % dst fetch {error, term(), scrState()} ?JError("Fetch dst ~p : ~p", [Key, Error]), Mod:report_status(Key, {error, Error}, State1), {true, State1}; - {S, D} -> - DiffFun = case erlang:function_exported(Mod, is_equal, 4) of - true -> fun Mod:is_equal/4; - false -> fun is_equal/4 - end, - case DiffFun(Key, S, D, State) of - false -> execute_prov_fun("Updated", Mod, update_dst, [Key, S, State], ShouldLog, IsError); + {S, D} -> % need to invoke is_equal() + case IsEqualFun(Key, S, D) of + false -> + ?Info("process_events diff detected key=~p~n~p~n~p",[Key, S, D]), + execute_prov_fun("Updated", Mod, update_dst, [Key, S, State], ShouldLog, IsError); true -> Mod:report_status(Key, no_op, State), {IsError, State} end end, - process_events(Keys, Mod, NewState, ShouldLog, NewIsError). + process_events(Keys, Mod, NewState, ShouldLog, IsEqualFun, NewIsError). + +-spec make_is_equal_fun(module(), scrState()) -> fun(). 
+make_is_equal_fun(Mod, State) -> + case erlang:function_exported(Mod, is_equal, 4) of + true -> fun(Key,S,D) -> Mod:is_equal(Key, S, D, State) end; + false -> fun(Key,S,D) -> is_equal(Key, S, D, State) end + end. -spec execute_prov_fun(scrOperation(), jobModule(), atom(), list(), boolean(), boolean()) -> {scrSoftError(), scrState() | term()}. @@ -770,7 +795,7 @@ execute_prov_fun(Op, Mod, Fun, Args, ShouldLog, IsError, check) -> end. -spec cleanup_refresh_collect(jobModule(), #cleanup_ctx{}, scrState()) -> - #{deletes => scrAnyKeyVals(), inserts => scrAnyKeyVals(), lastKey => scrAnyKeyVal()}. + {#{deletes => scrAnyKeyVals(), inserts => scrAnyKeyVals(), lastKey => scrAnyKeyVal()}, scrState()}. cleanup_refresh_collect(Mod, CleanupCtx, State) -> % note: Key used here in the sense of key-value-pair (KVP) where the value can itself % be structured (e.g. {Content::map(), Meta::map()} in Office365 contact sync) @@ -798,69 +823,96 @@ cleanup_refresh_collect(Mod, CleanupCtx, State) -> error({step_failed, State3}); DKeys -> {DKeys, State2} % deprecated simple API which returns the kv-pairs only end, - {cleanup_refresh_compare(CleanupCtx#cleanup_ctx{ srcKeys=SrcKeys, srcCount=length(SrcKeys) - , dstKeys=DstKeys, dstCount=length(DstKeys) - , lastKey=CurKey}), State4}. - --spec cleanup_refresh_compare(#cleanup_ctx{}) -> #{ deletes=>scrAnyKeys(), differences=>scrAnyKeys() - , inserts=>scrAnyKeys(), lastKey=>scrAnyKey()}. -cleanup_refresh_compare(#cleanup_ctx{ srcKeys=SrcKeys, dstKeys=[] - , deletes=Deletes,inserts=Inserts, differences=Diffs - , srcCount=SrcCount, dstCount=DstCount, bulkCount=BulkCnt - , minKey=MinKey}) + CpResult = cleanup_refresh_compare( + make_is_equal_fun(Mod, State), + CleanupCtx#cleanup_ctx{ srcKeys=SrcKeys, srcCount=length(SrcKeys) + , dstKeys=DstKeys, dstCount=length(DstKeys) + , lastKey=CurKey}), + {CpResult, State4}. 
+ +-spec cleanup_refresh_compare( fun(), #cleanup_ctx{}) -> #{ deletes=>scrAnyKeys() + , differences=>scrAnyKeys() + , inserts=>scrAnyKeys() + , lastKey=>scrAnyKey()}. +cleanup_refresh_compare(_, #cleanup_ctx{ srcKeys=SrcKeys, dstKeys=[] + , deletes=Deletes, inserts=Inserts, differences=Diffs + , srcCount=SrcCount, dstCount=DstCount, bulkCount=BulkCnt + , minKey=MinKey}) when DstCount < BulkCnt, SrcCount < BulkCnt -> - Remaining = take_keys(SrcKeys), + Remaining = take_keys(SrcKeys), % no more dstKeys and all srcKeys fetched -> end of cycle #{deletes=>Deletes, differences=>Diffs, inserts=>Inserts++Remaining, lastKey=>MinKey}; -cleanup_refresh_compare(#cleanup_ctx{ srcKeys=SrcKeys, dstKeys=[] - , deletes=Deletes, inserts=Inserts, differences=Diffs - , dstCount=DstCount}) + +cleanup_refresh_compare(_, #cleanup_ctx{ srcKeys=SrcKeys, dstKeys=[] + , deletes=Deletes, inserts=Inserts, differences=Diffs + , dstCount=DstCount}) when DstCount == 0 -> - Remaining = take_keys(SrcKeys), + Remaining = take_keys(SrcKeys), % no dstKeys but more srcKeys -> another batch needed #{deletes=>Deletes, differences=>Diffs, inserts=>Inserts++Remaining, lastKey=>last_key(SrcKeys)}; -cleanup_refresh_compare(#cleanup_ctx{ dstKeys=[] - , deletes=Deletes, inserts=Inserts, differences=Diffs - , lastKey=LK}) -> + +cleanup_refresh_compare(_, #cleanup_ctx{ dstKeys=[] + , deletes=Deletes, inserts=Inserts, differences=Diffs + , lastKey=LK}) -> + % no more data but not complete, another batch needed #{deletes=>Deletes, differences=>Diffs, inserts=>Inserts, lastKey=>LK}; -cleanup_refresh_compare(#cleanup_ctx{ srcKeys=[], dstKeys=DstKeys, minKey=MinKey - , deletes=Deletes, inserts=Inserts, differences=Diffs - , srcCount=SrcCount, dstCount=DstCount, bulkCount=BulkCnt}) + +cleanup_refresh_compare(_, #cleanup_ctx{ srcKeys=[], dstKeys=DstKeys, minKey=MinKey + , deletes=Deletes, inserts=Inserts, differences=Diffs + , srcCount=SrcCount, dstCount=DstCount, bulkCount=BulkCnt}) when SrcCount < BulkCnt, DstCount 
< BulkCnt -> - Remaining = take_keys(DstKeys), + Remaining = take_keys(DstKeys), % no more srcKeys and all dstKeys fetched -> end of cycle #{deletes=>Deletes++Remaining, differences=>Diffs, inserts=>Inserts, lastKey=>MinKey}; -cleanup_refresh_compare(#cleanup_ctx{ srcKeys=[], dstKeys=DstKeys - , deletes=Deletes, inserts=Inserts, differences=Diffs - , srcCount=SrcCount}) + +cleanup_refresh_compare(_, #cleanup_ctx{ srcKeys=[], dstKeys=DstKeys + , deletes=Deletes, inserts=Inserts, differences=Diffs + , srcCount=SrcCount}) when SrcCount == 0 -> - Remaining = take_keys(DstKeys), + Remaining = take_keys(DstKeys), % no srcKeys but more dstKeys -> another batch needed #{deletes=>Deletes++Remaining, differences=>Diffs, inserts=>Inserts, lastKey=>last_key(DstKeys)}; -cleanup_refresh_compare(#cleanup_ctx{ srcKeys=[] - , deletes=Deletes, inserts=Inserts, differences=Diffs - , lastKey=LK}) -> + +cleanup_refresh_compare(_, #cleanup_ctx{ srcKeys=[] + , deletes=Deletes, inserts=Inserts, differences=Diffs + , lastKey=LK}) -> + % no more data but not complete, another batch needed #{deletes=>Deletes, differences=>Diffs, inserts=>Inserts, lastKey=>LK}; -cleanup_refresh_compare(#cleanup_ctx{srcKeys=[K|SrcKeys], dstKeys=[K|DstKeys]} = CleanupCtx) -> - cleanup_refresh_compare(CleanupCtx#cleanup_ctx{ srcKeys=SrcKeys, dstKeys=DstKeys - , lastKey=last_key([K])}); -cleanup_refresh_compare(#cleanup_ctx{ srcKeys=[{K, _}|SrcKeys], dstKeys=[{K, _}|DstKeys] - , differences=Diffs} = CleanupCtx) -> - cleanup_refresh_compare(CleanupCtx#cleanup_ctx{ srcKeys=SrcKeys, dstKeys=DstKeys - , lastKey=K, differences=[K|Diffs]}); -cleanup_refresh_compare(#cleanup_ctx{ srcKeys=[SK|SrcKeys], dstKeys=[DK|DstKeys] - , inserts=Inserts, deletes=Deletes} = CleanupCtx) -> - case {last_key([SK]), last_key([DK])} of - {K1, K2} when K1 < K2 -> cleanup_refresh_compare( + +cleanup_refresh_compare(IE, #cleanup_ctx{srcKeys=[K|SrcKeys], dstKeys=[K|DstKeys]} = CleanupCtx) -> + % exact kv-match, no_diff, recurse one item 
forward + cleanup_refresh_compare(IE, CleanupCtx#cleanup_ctx{ srcKeys=SrcKeys, dstKeys=DstKeys + , lastKey=last_key([K])}); + +cleanup_refresh_compare(IE, #cleanup_ctx{ srcKeys=[{K,S}|SrcKeys], dstKeys=[{K,D}|DstKeys] + , differences=Diffs} = CleanupCtx) -> + % key match but no exact value match + case IE(K, {K,S}, {K,D}) of + true -> % values compare equal -> recurse one item forward + cleanup_refresh_compare(IE, CleanupCtx#cleanup_ctx{ srcKeys=SrcKeys, dstKeys=DstKeys + , lastKey=K, differences=Diffs}); + false -> % values different -> add diff and recurse one item forward + cleanup_refresh_compare(IE, CleanupCtx#cleanup_ctx{ srcKeys=SrcKeys, dstKeys=DstKeys + , lastKey=K, differences=[K|Diffs]}) + end; + +cleanup_refresh_compare(IE, #cleanup_ctx{ srcKeys=[SK|SrcKeys], dstKeys=[DK|DstKeys] + , inserts=Inserts, deletes=Deletes} = CleanupCtx) -> + % keys are different, compare keys + K1 = take_key(SK), + K2 = take_key(DK), + if + K1 < K2 -> cleanup_refresh_compare(IE, CleanupCtx#cleanup_ctx{srcKeys=SrcKeys, inserts=[K1|Inserts], lastKey=K1}); - {K1, K2} when K2 < K1 -> cleanup_refresh_compare( + K2 < K1 -> cleanup_refresh_compare(IE, CleanupCtx#cleanup_ctx{dstKeys=DstKeys, deletes=[K2|Deletes], lastKey=K2}) end. +-spec take_key(scrAnyKey()|scrAnyKeyVal()) -> scrAnyKey(). +take_key({K, _}) -> K; +take_key(K) -> K. + -spec take_keys(scrAnyKeys()|scrAnyKeyVals()) -> scrAnyKeys(). -take_keys([]) -> []; -take_keys([{_, _} | _] = KVs) -> [K || {K, _} <- KVs]; -take_keys(Keys) -> Keys. +take_keys(KVs) -> [take_key(KV) || KV <- KVs]. -spec last_key(scrAnyKeys()|scrAnyKeyVals()) -> scrAnyKey(). -last_key([{_, _} | _] = KVs) -> element(1, lists:last(KVs)); -last_key(Keys) -> lists:last(Keys). +last_key(KVs) -> take_key(lists:last(KVs)). 
%% ---------------------- %% Eunit Tests diff --git a/src/dperl/jobs/dpjob_office_365.erl b/src/dperl/jobs/dpjob_office_365.erl index 3cb0f5c5..94df86c0 100644 --- a/src/dperl/jobs/dpjob_office_365.erl +++ b/src/dperl/jobs/dpjob_office_365.erl @@ -151,6 +151,9 @@ , insert_dst/3 , update_dst/3 , report_status/3 + , is_equal/4 + , read_local_kvps_after_id/5 + , get_next_id_key/4 ]). % dderl_oauth exports @@ -160,6 +163,7 @@ , get_auth_token_key_prefix/1 , get_key_prefix/0 , get_key_prefix/1 + , get_local_key/4 ]). -spec get_auth_config() -> map(). @@ -196,6 +200,15 @@ get_local_key(Id, _Name, Channel, KeyPrefix) -> [Key] -> Key % ToDo: Check/filter with META key = Name end. +-spec get_next_id_key(remKey(), jobName(), scrChannel(), locKey()) -> {remKey(), locKey()} | ?NOT_FOUND. +get_next_id_key(Id, _Name, Channel, KeyPrefix) -> + Stu = {?CONTACT_INDEXID,Id}, + case dperl_dal:next_channel_index_key_prefix(Channel, Stu, KeyPrefix) of + [] -> ?NOT_FOUND; + [{NextId, NextKey}] -> {NextId, NextKey} % ToDo: Check/filter with META key = Name + end. + + % convert list of remote values (already maps) to list of {Key,RemoteId,RemoteValue} triples % which serves as a lookup buffer of the complete remote state, avoiding sorting issues % -spec format_remote_values_to_kv(remVal(), locKey(), jobName()) -> remVal(). @@ -208,6 +221,13 @@ get_local_key(Id, _Name, Channel, KeyPrefix) -> % Key = KeyPrefix ++ [new_local_id(RemoteId)], % format_remote_values_to_kv(Values, KeyPrefix, JobName, [{Key,RemoteId,format_value(Value, JobName)}|Acc]). +-spec is_equal(remKVP()|locKVP(), {remVal(), remMeta()}, {locVal(), locMeta()}, #state{}) -> boolean(). +is_equal(_, {Val,_}, {Val,_}, _State) -> true; +is_equal(_, {_,_}, {_,_}, _State) -> false; +is_equal(KVP, Val1, Val2, _State) -> + ?Info("is_equal unexpected input ~p~n~p~n~p",[KVP, Val1, Val2]), + false. 
+ % format remote value into a special KV pair with seperated contact- and meta-maps % by projecting out syncable attributes(content / meta) -spec remote_kvp(remVal(), jobName()) -> remKVP(). @@ -273,6 +293,7 @@ fetch_src(Id, #state{name=Name, type=pull, apiUrl=ApiUrl, token=Token} = State) ?JTrace("Fetching contact with url : ~s", [ContactUrl]), case exec_req(ContactUrl, Token) of {error, unauthorized} -> reconnect_exec(State, fetch_src, [Id]); + {error, ?NOT_FOUND} -> ?NOT_FOUND; {error, Error} -> {error, Error, State}; #{<<"id">> := _} = RemVal -> remote_kvp(RemVal, Name); _ -> ?NOT_FOUND @@ -402,7 +423,7 @@ delete_dst(Id, #state{ name=Name, channel=Channel, keyPrefix=KeyPrefix ?NOT_FOUND -> {true, state}; {Id, {Value, Meta}} -> - case maps:without(Name, Meta) of + case maps:without([Name], Meta) of #{} -> % no other syncs remaining for this key dperl_dal:remove_from_channel(Channel, Key), {false, State}; @@ -422,20 +443,23 @@ report_status(_Key, _Status, _State) -> no_op. -spec load_dst_after_key(remKVP() | locKVP(), scrBatchSize(), #state{}) -> {ok, locKVPs(), #state{}} | {error, term(), #state{}}. load_dst_after_key({Id,{_,_}}, BlkCount, #state{name=Name, channel=Channel, type=pull, keyPrefix=KeyPrefix}) -> - {ok, read_local_kvps_after_id(Channel, Name, KeyPrefix, Id, BlkCount, [])}; + {ok, read_local_kvps_after_id(Channel, Name, KeyPrefix, Id, BlkCount)}; load_dst_after_key(_Key, BlkCount, #state{name=Name, channel=Channel, type=pull, keyPrefix=KeyPrefix} = State) -> ?Info("load_dst_after_key for non-matching (initial) key ~p", [_Key]), - case read_local_kvps_after_id(Channel, Name, KeyPrefix, ?COMPARE_MIN_KEY, BlkCount, []) of + case read_local_kvps_after_id(Channel, Name, KeyPrefix, ?COMPARE_MIN_KEY, BlkCount) of L when is_list(L) -> {ok, L, State}; {error, Reason} -> {error, Reason, State} end. 
%% starting after Id, run through remoteId index and collect a block of locKVP() data belonging to %% this job name and keyPrefix --spec read_local_kvps_after_id(scrChannel(), jobName(), locKey(), remKey(), scrBatchSize(), locKVPs()) -> locKVPs() | {error, term()}. -read_local_kvps_after_id(_Channel, _Name, _KeyPrefix, _Id, _BlkCount, _Acc) -> - % lists:prefix(KeyPrefix,K) - []. +-spec read_local_kvps_after_id(scrChannel(), jobName(), locKey(), remKey(), scrBatchSize()) -> locKVPs() | {error, term()}. +read_local_kvps_after_id(Channel, Name, KeyPrefix, SeenId, BlkCount) -> + Stu = {?CONTACT_INDEXID, SeenId}, % last visited index key, get next BlkCount {Id, Value} tuples + case dperl_dal:read_channel_index_key_prefix_gt(Channel, Stu, KeyPrefix, BlkCount) of + [] -> []; + LocKeys -> [local_kvp(Id, dperl_dal:read_channel(Channel, Key), Name) || {Id,Key} <- LocKeys] + end. -spec load_src_after_key(remKVP()| locKVP(), scrBatchSize(), #state{}) -> {ok, remKVPs(), #state{}} | {error, term(), #state{}}. @@ -560,6 +584,8 @@ exec_req(Url, Token) -> imem_json:decode(list_to_binary(Result), [return_maps]); {ok, {{_, 401, _}, _, _}} -> {error, unauthorized}; + {ok, {{_, 404, _}, _, _}} -> + {error, ?NOT_FOUND}; {error, Reason} -> ?Info("exec_req get ~p returns error ~p",[Url,Reason]), {error, Reason} @@ -581,12 +607,14 @@ exec_req(Url, Token, Body, Method) -> {ok, {{_, 200, _}, _, Result}} -> % update/patch result imem_json:decode(list_to_binary(Result), [return_maps]); - {ok,{{_, 204, _}, _, _}} -> + {ok, {{_, 204, _}, _, _}} -> % delete result ok; {ok, {{_, 401, _}, _, Error}} -> ?JError("Unauthorized body : ~s", [Error]), {error, unauthorized}; + {ok, {{_, 404, _}, _, _}} -> + {error, ?NOT_FOUND}; Error -> {error, Error} end. 
From 7494d77b856626af53fcee1b106b14ee1a0fe70d Mon Sep 17 00:00:00 2001 From: stoch Date: Sat, 4 Jul 2020 00:06:06 +0200 Subject: [PATCH 65/72] fix otp22 issues (tuple calls) in dderl diff module and implement JSON diffing, WIP --- src/dderl_dal.erl | 4 +- src/dderl_diff.erl | 9 ++-- src/dderl_fsm.erl | 2 +- src/dderl_oauth.erl | 51 ++++++++++---------- src/dderl_session.erl | 18 +++++--- src/dperl/dperl_strategy_scr.erl | 2 + src/dperl/jobs/dpjob_office_365.erl | 72 ++++++++++++++++++----------- src/gen_adapter.erl | 8 ++-- src/imem_adapter.erl | 10 ++-- 9 files changed, 103 insertions(+), 73 deletions(-) diff --git a/src/dderl_dal.erl b/src/dderl_dal.erl index 350cddbf..060ee50c 100644 --- a/src/dderl_dal.erl +++ b/src/dderl_dal.erl @@ -84,13 +84,13 @@ del_conn(Sess, UserId, ConId) -> HasAll = (erlimem_session:run_cmd(Sess, have_permission, [[?MANAGE_CONNS]]) == true), if HasAll -> ok = erlimem_session:run_cmd(Sess, delete, [ddConn, ConId]), - ?Info("user ~p deleted connection ~p", [UserId, ConId]), + %?Info("user ~p deleted connection ~p", [UserId, ConId]), ok; true -> case erlimem_session:run_cmd(Sess, select, [ddConn, [{#ddConn{id=ConId,owner=UserId,_='_'},[],['$_']}]]) of {[_|_], true} -> ok = erlimem_session:run_cmd(Sess, delete, [ddConn, ConId]), - ?Info("user ~p deleted connection ~p", [UserId, ConId]), + %?Info("user ~p deleted connection ~p", [UserId, ConId]), ok; _ -> ?Error("user ~p doesn't have permission to delete connection ~p", [UserId, ConId]), diff --git a/src/dderl_diff.erl b/src/dderl_diff.erl index c19dff3d..11c76734 100644 --- a/src/dderl_diff.erl +++ b/src/dderl_diff.erl @@ -10,7 +10,9 @@ -spec term_diff(term(), pid(), atom(), binary(), atom(), binary()) -> binary(). 
term_diff(Sess, SessPid, LeftType, LeftValue, RightType, RightValue) -> - case Sess:run_cmd(term_diff, [LeftType, LeftValue, RightType, RightValue, [ignore_whitespace,ignore_casing,ignore_dquotes]]) of + %?Info("term_diff called with args: ~p ~p ~p ~p", [LeftType, LeftValue, RightType, RightValue]), + %?Info("term_diff called with Sess ~p SessPid ~p", [Sess, SessPid]), + case erlimem_session:run_cmd(Sess, term_diff, [LeftType, LeftValue, RightType, RightValue, [ignore_whitespace,ignore_casing,ignore_dquotes]]) of {error, {{Ex, M}, _Stacktrace} = Error} -> ?Error("Error on term_diff ~p: ~p", [{LeftType, LeftValue, RightType, RightValue}, Error], _Stacktrace), Err = list_to_binary(atom_to_list(Ex) ++ ": " ++ @@ -26,7 +28,7 @@ term_diff(Sess, SessPid, LeftType, LeftValue, RightType, RightValue) -> Err = list_to_binary(lists:flatten(io_lib:format("~p", [Reason]))), #{<<"error">> => Err}; DiffResult -> - ?Debug("The diff result ~p", [DiffResult]), + %?Info("The diff result ~p", [DiffResult]), FsmCtx = get_fsmctx(DiffResult), StmtFsm = dderl_fsm:start(FsmCtx, SessPid), Columns = gen_adapter:build_column_json(lists:reverse(get_columns())), @@ -56,8 +58,7 @@ get_fsmctx(Result) -> [fun(_Opts, _Count) -> Rows = [{{}, {RowId, Left, Cmp, Right}} || {ddTermDiff, RowId, Left, Cmp, Right} <- Result], % This seems hackish but we don't want to keep a process here. - % TODO: Revisit after tuple calls have been removed. 
- dderl_fsm:rows({self(), {Rows, true}}, {dderl_fsm, self()}) + dderl_fsm:rows(self(), {self(), {Rows, true}}) end] ,fetch_close_funs = [fun() -> ok end] ,stmt_close_funs = [fun() -> ok end] diff --git a/src/dderl_fsm.erl b/src/dderl_fsm.erl index 9f250770..e67598d9 100644 --- a/src/dderl_fsm.erl +++ b/src/dderl_fsm.erl @@ -333,7 +333,7 @@ rows(Pid, {StmtRef, {Rows,Completed}}) when is_list(Rows) -> % from erlimem/ime %?Info("dderl_fsm:rows from ~p ~p~n~p", [StmtRef, length(Rows), Rows]), gen_statem:cast(Pid, {rows, {StmtRef,Rows,Completed}}); rows(Pid, {Rows,Completed}) when is_list(Rows) -> - %?Info("dderl_fsm:rows ~p ~p", [length(Rows), Completed]), + ?Info("dderl_fsm:rows ~p ~p", [length(Rows), Completed]), gen_statem:cast(Pid, {rows, {self(),Rows,Completed}}); rows(Pid, {StmtRef, Error}) -> % from erlimem/imem_server %?Info("dderl_fsm:rows from ~p ~p", [StmtRef, Error]), diff --git a/src/dderl_oauth.erl b/src/dderl_oauth.erl index 3477d2c6..4424cb38 100644 --- a/src/dderl_oauth.erl +++ b/src/dderl_oauth.erl @@ -13,16 +13,16 @@ get_token_info(AccountId, TokenPrefix, _SyncType) -> dderl_dal:read_from_avatar_channel(AccountId, TokenPrefix ++ [?TOKEN_KEYPART]). 
-set_token_info(AccountId, TokenPrefix, TokenInfo, SyncType) when is_map(TokenInfo) -> - set_token_info(AccountId, TokenPrefix, imem_json:encode(TokenInfo), SyncType); -set_token_info(AccountId, TokenPrefix, TokenInfo, SyncType) when is_list(TokenInfo) -> - set_token_info(AccountId, TokenPrefix, list_to_binary(TokenInfo), SyncType); +set_token_info(AccountId, TokenPrefix, TokenInfo, SyncMod) when is_map(TokenInfo) -> + set_token_info(AccountId, TokenPrefix, imem_json:encode(TokenInfo), SyncMod); +set_token_info(AccountId, TokenPrefix, TokenInfo, SyncMod) when is_list(TokenInfo) -> + set_token_info(AccountId, TokenPrefix, list_to_binary(TokenInfo), SyncMod); set_token_info(AccountId, TokenPrefix, TokenInfo, _SyncType) when is_binary(TokenInfo) -> - ?Info("set_token_info using ~p",[imem_enc_mnesia:get_enc_hash()]), + %?Info("set_token_info using ~p",[imem_enc_mnesia:get_enc_hash()]), dderl_dal:write_to_avatar_channel(AccountId, TokenPrefix ++ [?TOKEN_KEYPART], TokenInfo). -get_authorize_url(XSRFToken, AuthConfig, SyncType) -> - State = #{xsrfToken => XSRFToken, type => SyncType}, +get_authorize_url(XSRFToken, AuthConfig, SyncMod) -> + State = #{xsrfToken => XSRFToken, type => SyncMod}, #{auth_url:=Url, client_id:=ClientId, redirect_uri:=RedirectURI, scope:=Scope} = AuthConfig, UrlParams = dperl_dal:url_enc_params( #{"client_id" => ClientId, "redirect_uri" => {enc, RedirectURI} @@ -30,12 +30,12 @@ get_authorize_url(XSRFToken, AuthConfig, SyncType) -> erlang:iolist_to_binary([Url, "&", UrlParams]). -%% get token info from web service using the configuration from callback module +%% get token info for a web service using the configuration from callback module %% store it in the avatar table of AccountId under the key TokenPrefix || "#token#" -spec get_access_token(ddEntityId(), list(), string(), module()) -> ok | {error, term()}. 
-get_access_token(AccountId, TokenPrefix, Code, SyncType) -> +get_access_token(AccountId, TokenPrefix, Code, SyncMod) -> AuthConfig = try - SyncType:get_auth_config() % ToDo: AuthConfig may depend on JobName or TokenPrefix + SyncMod:get_auth_config() % ToDo: AuthConfig may depend on JobName or TokenPrefix catch _:E:S -> ?Error("Finding AuthConfig : ~p ñ~p", [E,S]), @@ -44,15 +44,15 @@ get_access_token(AccountId, TokenPrefix, Code, SyncType) -> %?Info("get_access_token AuthConfig: ~p",[AuthConfig]), #{token_url:=TUrl, client_id:=ClientId, redirect_uri:=RedirectURI ,client_secret:=Secret, grant_type:=GrantType - ,scope := Scope} = AuthConfig, + ,scope:=Scope} = AuthConfig, Body = dperl_dal:url_enc_params( - #{ "client_id" => ClientId, "scope" => {enc, Scope}, "code" => Code - , "redirect_uri" => {enc, RedirectURI}, "grant_type" => GrantType - , "client_secret" => {enc, Secret}}), + #{ "client_id"=>ClientId, "scope"=>{enc, Scope}, "code"=>Code + , "redirect_uri"=>{enc, RedirectURI}, "grant_type"=>GrantType + , "client_secret"=>{enc, Secret}}), ContentType = "application/x-www-form-urlencoded", case httpc:request(post, {TUrl, "", ContentType, Body}, [], []) of {ok, {{_, 200, "OK"}, _, TokenInfo}} -> - set_token_info(AccountId, TokenPrefix, TokenInfo, SyncType), + set_token_info(AccountId, TokenPrefix, TokenInfo, SyncMod), ok; {ok, {{_, Code, _}, _, Error}} -> {error, Error}; @@ -64,23 +64,24 @@ get_access_token(AccountId, TokenPrefix, Code, SyncType) -> %% refresh access token from web service using the configuration from callback module %% store it in the avatar table of AccountId under the key TokenPrefix || "#token#" -spec refresh_access_token(ddEntityId(), list(), module()) -> {ok, binary()} | {error, term()}. 
-refresh_access_token(AccountId, TokenPrefix, SyncType) -> - #{token_url:=TUrl, client_id:=ClientId, scope:=Scope, client_secret:=Secret} - = SyncType:get_auth_config(), - ?Info("refresh_access_token ~p ~p ~p",[AccountId, TokenPrefix, SyncType]), - #{<<"refresh_token">>:=RefreshToken} = get_token_info(AccountId, TokenPrefix, SyncType), +refresh_access_token(AccountId, TokenPrefix, SyncMod) -> + #{token_url:=TUrl, client_id:=ClientId % , redirect_uri:=RedirectURI + ,client_secret:=Secret % , grant_type:=GrantType + ,scope:=Scope} = SyncMod:get_auth_config(), + %?Info("refresh_access_token ~p ~p ~p",[AccountId, TokenPrefix, SyncMod]), + #{<<"refresh_token">>:=RefreshToken} = get_token_info(AccountId, TokenPrefix, SyncMod), Body = dperl_dal:url_enc_params(#{ "client_id"=>ClientId, "client_secret"=>{enc, Secret} , "scope"=>{enc, Scope}, "refresh_token"=>RefreshToken , "grant_type"=>"refresh_token"}), ContentType = "application/x-www-form-urlencoded", - ?Info("refresh_access_token TUrl=~p",[TUrl]), - ?Info("refresh_access_token ContentType=~p",[ContentType]), - ?Info("refresh_access_token Body=~p",[Body]), - ?Info("refresh_access_token RefreshToken=~p",[RefreshToken]), + %?Info("refresh_access_token TUrl=~p",[TUrl]), + %?Info("refresh_access_token ContentType=~p",[ContentType]), + %?Info("refresh_access_token Body=~p",[Body]), + %?Info("refresh_access_token RefreshToken=~p",[RefreshToken]), case httpc:request(post, {TUrl, "", ContentType, Body}, [], []) of {ok, {{_, 200, "OK"}, _, TokenBody}} -> TokenInfo = imem_json:decode(list_to_binary(TokenBody), [return_maps]), - set_token_info(AccountId, TokenPrefix, TokenBody, SyncType), + set_token_info(AccountId, TokenPrefix, TokenInfo, SyncMod), #{<<"access_token">> := Token} = TokenInfo, {ok, Token}; Error -> diff --git a/src/dderl_session.erl b/src/dderl_session.erl index 2b2e0100..ac1d5c03 100644 --- a/src/dderl_session.erl +++ b/src/dderl_session.erl @@ -155,8 +155,8 @@ handle_info(inactive, #state{user = User, 
inactive_tref = ITref} = State) -> cancel_timer(ITref), {noreply, State#state{lock_state = screensaver}} end; -handle_info(die, #state{user=User}=State) -> - ?Info([{user, User}], "session ~p idle for ~p ms", [{self(), User}, ?SESSION_IDLE_TIMEOUT]), +handle_info(die, #state{user=_User}=State) -> + %?Info([{user, User}], "session ~p idle for ~p ms", [{self(), _User}, ?SESSION_IDLE_TIMEOUT]), {stop, normal, State}; handle_info(logout, #state{user = User} = State) -> ?Debug("terminating session of logged out user ~p", [User]), @@ -391,7 +391,7 @@ process_call({[<<"oura_ring_auth_config">>], _ReqData}, _Adapter, From, {SrcIp, act_log(From, ?CMD_NOARGS, #{src => SrcIp, cmd => "oura_ring_auth_config"}, State), AuthConfig = dpjob_ouraring_crawl:get_auth_config(), % ToDo: may depend on JobName or TokenPrefix Url = dderl_oauth:get_authorize_url(State#state.xsrf_token, AuthConfig, ?SYNC_OURARING), - ?Info("oura_ring_auth_config ~p ~p",[AuthConfig, Url]), + %?Info("oura_ring_auth_config ~p ~p",[AuthConfig, Url]), reply(From, #{<<"oura_ring_auth_config">> => #{<<"url">> => Url}}, self()), State; @@ -399,7 +399,7 @@ process_call({[<<"oauth2_callback">>], ReqData}, _Adapter, From, {SrcIp, _}, Sta act_log(From, ?CMD_NOARGS, #{src => SrcIp, cmd => "oauth2_callback", args => ReqData}, State), #{<<"oauth2_callback">> := #{<<"code">> := Code, <<"state">> := #{<<"type">> := SyncType}}} = jsx:decode(ReqData, [return_maps]), - ?Info("oauth2_callback SyncType: ~p Code: ~p",[SyncType, Code]), + %?Info("oauth2_callback SyncType: ~p Code: ~p",[SyncType, Code]), % ToDo: Check if this data can be trusted {SyncHandler,TokenPrefix} = try SH = binary_to_existing_atom(SyncType,utf8), @@ -409,7 +409,7 @@ process_call({[<<"oauth2_callback">>], ReqData}, _Adapter, From, {SrcIp, _}, Sta ?Error("Finding TokenPrefix : ~p", [E]), reply(From, #{<<"oauth2_callback">> => #{<<"error">> => <<"Error finding TokenPrefix">>}}, self()) end, - ?Info("oauth2_callback TokenPrefix: ~p",[TokenPrefix]), + 
%?Info("oauth2_callback TokenPrefix: ~p",[TokenPrefix]), case dderl_oauth:get_access_token(State#state.user, TokenPrefix, Code, SyncHandler) of ok -> reply(From, #{<<"oauth2_callback">> => #{<<"status">> => <<"ok">>}}, self()); @@ -478,7 +478,7 @@ process_call({[<<"del_con">>], ReqData}, _Adapter, From, {SrcIp,_}, act_log(From, ?CMD_NOARGS, #{src => SrcIp, cmd => "del_con", args => ReqData}, State), [{<<"del_con">>, BodyJson}] = jsx:decode(ReqData), ConId = proplists:get_value(<<"conid">>, BodyJson, 0), - ?Info([{user, State#state.user}], "connection to delete ~p", [ConId]), + %?Info([{user, State#state.user}], "connection to delete ~p", [ConId]), Resp = case dderl_dal:del_conn(Sess, UserId, ConId) of ok -> <<"success">>; Error -> [{<<"error">>, list_to_binary(lists:flatten(io_lib:format("~p", [Error])))}] @@ -695,16 +695,20 @@ process_call({Cmd, ReqData}, Adapter, From, {SrcIp,_}, #state{sess = Sess, user_ spawn_process_call(Adapter, CurrentPriv, From, Cmd, BodyJson, Sess, UserId, SelfPid) -> try + %?Info("spawn_gen_process_call Adapter=~p cmd=~p", [Adapter, Cmd]), + %?Info("spawn_process_call called with Sess ~p SelfPid ~p", [Sess, SelfPid]), + timer:sleep(1000), Adapter:process_cmd({Cmd, BodyJson}, Sess, UserId, From, CurrentPriv, SelfPid), SelfPid ! rearm_session_idle_timer catch Class:Error:Stacktrace -> - ?Error("Problem processing command: ~p:~p~n~p~n~p~n", + ?Error("Problem processing command: ~p:~p~n~p~n~p", [Class, Error, BodyJson, Stacktrace]), reply(From, [{<<"error">>, <<"Unable to process the request">>}], SelfPid) end. spawn_gen_process_call(Adapter, From, C, BodyJson, Sess, UserId, SelfPid) -> try + %?Info("spawn_gen_process_call adapter=~p cmd=~p", [Adapter, C]), gen_adapter:process_cmd({[C], BodyJson}, adapter_name(Adapter), Sess, UserId, From, undefined), SelfPid ! 
rearm_session_idle_timer catch Class:Error:Stacktrace -> diff --git a/src/dperl/dperl_strategy_scr.erl b/src/dperl/dperl_strategy_scr.erl index 47e08614..07ee3503 100644 --- a/src/dperl/dperl_strategy_scr.erl +++ b/src/dperl/dperl_strategy_scr.erl @@ -708,6 +708,8 @@ process_events([], Mod, State, _ShouldLog, _IsEqualFun, IsError) -> end; process_events([Key | Keys], Mod, State, ShouldLog, IsEqualFun, IsError) -> % Both values/KVPs are fetched again in order to avoid race conditions + %?Info("Difference src: ~p", [Mod:fetch_src(Key, State)]), + %?Info("Difference dst: ~p", [Mod:fetch_dst(Key, State)]), {NewIsError, NewState} = case {Mod:fetch_src(Key, State), Mod:fetch_dst(Key, State)} of {S, S} -> % exactly equal Erlang terms, nothing to do Mod:report_status(Key, no_op, State), diff --git a/src/dperl/jobs/dpjob_office_365.erl b/src/dperl/jobs/dpjob_office_365.erl index 94df86c0..73cb811d 100644 --- a/src/dperl/jobs/dpjob_office_365.erl +++ b/src/dperl/jobs/dpjob_office_365.erl @@ -44,7 +44,7 @@ ,client_secret => "12345" ,grant_type => "authorization_code" ,token_url => "https://login.microsoftonline.com/common/oauth2/v2.0/token" - ,scope => "offline_access https://graph.microsoft.com/people.read" + ,scope => "offline_access https://graph.microsoft.com/Contacts.ReadWrite" }, "Office 365 (Graph API) auth config" ) @@ -97,9 +97,9 @@ ?GET_CONFIG(metaAttributes, [__JOB_NAME], [<<"id">> - %,<<"lastModifiedDateTime">> - %,<<"changeKey">> - %,<<"parentFolderId">> + ,<<"lastModifiedDateTime">> + ,<<"changeKey">> + ,<<"parentFolderId">> %,<<"createdDateTime">> ], "Meta attributes used for Office365 contact change tracking" @@ -128,7 +128,8 @@ , token :: token() % access token binary , apiUrl :: string() , fetchUrl :: string() - , cycleBuffer = [] :: remKVPs() | remKVPs() % dirty buffer for one c/r cycle + , remCache = [] :: remKVPs() % filled at start of c/r cycle + , clBuffer = [] :: remKVPs() | remKVPs() % dirty buffer for one c/r cycle , isConnected = true :: boolean() % 
fail unauthorized on first use , isFirstSync = true :: boolean() , isCleanupFinished = true :: boolean() @@ -222,7 +223,7 @@ get_next_id_key(Id, _Name, Channel, KeyPrefix) -> % format_remote_values_to_kv(Values, KeyPrefix, JobName, [{Key,RemoteId,format_value(Value, JobName)}|Acc]). -spec is_equal(remKVP()|locKVP(), {remVal(), remMeta()}, {locVal(), locMeta()}, #state{}) -> boolean(). -is_equal(_, {Val,_}, {Val,_}, _State) -> true; +is_equal(_, {Val,Meta}, {Val,Meta}, _State) -> true; % both must match, want to update meta too is_equal(_, {_,_}, {_,_}, _State) -> false; is_equal(KVP, Val1, Val2, _State) -> ?Info("is_equal unexpected input ~p~n~p~n~p",[KVP, Val1, Val2]), @@ -274,11 +275,11 @@ connect_check_src(#state{isConnected=false, type=pull, accountId=AccountId, toke % UniqueKeys = lists:delete(undefined, lists:usort(Keys)), % {ok, UniqueKeys, State#state{auditStartTime=NextStartTime}} % end; -get_source_events(#state{cycleBuffer=[]} = State, _BulkSize) -> +get_source_events(#state{clBuffer=[]} = State, _BulkSize) -> {ok, sync_complete, State}; -get_source_events(#state{cycleBuffer=CycleBuffer} = State, _BulkSize) -> - ?Info("get_source_events result count ~p~n~p",[length(CycleBuffer), hd(CycleBuffer)]), - {ok, CycleBuffer, State#state{cycleBuffer=[]}}. +get_source_events(#state{clBuffer=ClBuffer} = State, _BulkSize) -> + %?Info("get_source_events result count ~p~n~p",[length(ClBuffer), hd(ClBuffer)]), + {ok, ClBuffer, State#state{clBuffer=[]}}. - spec connect_check_dst(#state{}) -> {ok, #state{}}. % | {error, term()} | {error, term(), #state{}} connect_check_dst(State) -> {ok, State}. % ToDo: Maybe implement for push destination? @@ -291,11 +292,14 @@ do_refresh(_State, _BulkSize) -> {error, cleanup_only}. 
% using cleanup/refresh fetch_src(Id, #state{name=Name, type=pull, apiUrl=ApiUrl, token=Token} = State) -> ContactUrl = ApiUrl ++ binary_to_list(Id), ?JTrace("Fetching contact with url : ~s", [ContactUrl]), + %?Info("Fetching contact with url : ~s", [ContactUrl]), case exec_req(ContactUrl, Token) of {error, unauthorized} -> reconnect_exec(State, fetch_src, [Id]); {error, ?NOT_FOUND} -> ?NOT_FOUND; {error, Error} -> {error, Error, State}; - #{<<"id">> := _} = RemVal -> remote_kvp(RemVal, Name); + #{<<"id">> := _} = RemVal -> + %?Info("Fetched contact ~p ", [RemVal]), + remote_kvp(RemVal, Name); _ -> ?NOT_FOUND end. @@ -331,7 +335,7 @@ fetch_dst(Id, #state{name=Name, channel=Channel, keyPrefix=KeyPrefix, type=pull} insert_dst(Id, {Id, {Value,Meta}}, #state{ name=Name, channel=Channel, type=pull , keyPrefix=KeyPrefix, template=Template} = State) -> Key = KeyPrefix ++ [new_local_id(Id)], - ?Info("insert_dst ~p",[Key]), + %?Info("insert_dst ~p",[Key]), MergedValue = maps:merge(maps:merge(Template, Value), #{<<"META">> => #{Name=>Meta}}), MergedBin = imem_json:encode(MergedValue), case dperl_dal:write_channel(Channel, Key, MergedBin) of @@ -367,7 +371,7 @@ insert_dst(Id, {Id, {Value,Meta}}, #state{ name=Name, channel=Channel, type=pull update_dst(Id, {Id, {Value,Meta}}, #state{ name=Name, channel=Channel, keyPrefix=KeyPrefix , type=pull, template=Template} = State) -> Key = get_local_key(Id, Name, Channel, KeyPrefix), - ?Info("update_dst ~p",[Key]), + %?Info("update_dst ~p",[Key]), case dperl_dal:read_channel(Channel, Key) of ?NOT_FOUND -> ?JError("update_dst key ~p not found for remote id ~p", [Key, Id]), @@ -418,7 +422,7 @@ update_local(Channel, Key, _OldVal, NewVal, NewMeta) -> delete_dst(Id, #state{ name=Name, channel=Channel, keyPrefix=KeyPrefix , type=pull, template=Template} = State) -> Key = get_local_key(Id, Name, Channel, KeyPrefix), - ?Info("delete_dst ~p",[Key]), + %?Info("delete_dst ~p",[Key]), case fetch_dst(Id, State) of ?NOT_FOUND -> {true, state}; @@ 
-445,7 +449,7 @@ report_status(_Key, _Status, _State) -> no_op. load_dst_after_key({Id,{_,_}}, BlkCount, #state{name=Name, channel=Channel, type=pull, keyPrefix=KeyPrefix}) -> {ok, read_local_kvps_after_id(Channel, Name, KeyPrefix, Id, BlkCount)}; load_dst_after_key(_Key, BlkCount, #state{name=Name, channel=Channel, type=pull, keyPrefix=KeyPrefix} = State) -> - ?Info("load_dst_after_key for non-matching (initial) key ~p", [_Key]), + %?Info("load_dst_after_key for non-matching (initial) key ~p", [_Key]), case read_local_kvps_after_id(Channel, Name, KeyPrefix, ?COMPARE_MIN_KEY, BlkCount) of L when is_list(L) -> {ok, L, State}; {error, Reason} -> {error, Reason, State} @@ -463,27 +467,39 @@ read_local_kvps_after_id(Channel, Name, KeyPrefix, SeenId, BlkCount) -> -spec load_src_after_key(remKVP()| locKVP(), scrBatchSize(), #state{}) -> {ok, remKVPs(), #state{}} | {error, term(), #state{}}. -load_src_after_key(_CurKVP, _BlkCount, #state{type=pull, fetchUrl=finished} = State) -> +load_src_after_key(_CurKVP, _BlkCount, #state{type=pull, fetchUrl=cached, remCache=[]} = State) -> {ok, [], State}; +load_src_after_key(CurKVP, BlkCount, #state{type=pull, fetchUrl=cached, remCache=RemCache} = State) -> + case element(1, hd(RemCache)) of + SmallKey when SmallKey =< CurKVP -> + {error, cannot_serve_smaller_key, State}; + _ when length(RemCache) > BlkCount -> + {Serve,Keep} = lists:split(RemCache, BlkCount), + {ok, Serve, State#state{remCache=Keep}}; + _ -> + {ok, RemCache, State#state{remCache=[]}} + end; load_src_after_key(CurKVP, BlkCount, #state{type=pull, fetchUrl=undefined, apiUrl=ApiUrl} = State) -> - UrlParams = dperl_dal:url_enc_params(#{"$top" => integer_to_list(BlkCount)}), - ContactsUrl = lists:flatten([ApiUrl, "?", UrlParams]), - load_src_after_key(CurKVP, BlkCount, State#state{fetchUrl=ContactsUrl}); -load_src_after_key(CurKVP, BlkCount, #state{ name=Name, type=pull, token=Token - , fetchUrl=FetchUrl} = State) -> + UrlParams = dperl_dal:url_enc_params(#{"$top" => 
integer_to_list(BlkCount)}), % , "$select" => "id" + FetchUrl = lists:flatten([ApiUrl, "?", UrlParams]), ?JTrace("Fetching contacts with url : ~s", [FetchUrl]), + load_src_after_key(CurKVP, BlkCount, State#state{fetchUrl=FetchUrl, remCache=[]}); +load_src_after_key(CurKVP, BlkCount, #state{ name=Name, type=pull, token=Token + , fetchUrl=FetchUrl, remCache=RemCache} = State) -> case exec_req(FetchUrl, Token) of {error, unauthorized} -> reconnect_exec(State, load_src_after_key, [CurKVP, BlkCount]); {error, Error} -> {error, Error, State}; #{<<"@odata.nextLink">> := NextUrl, <<"value">> := RemVals} -> KVPs = [remote_kvp(RemVal, Name) || RemVal <- RemVals], ?JTrace("Fetched contacts : ~p", [length(KVPs)]), - %?Info("First fetched contact : ~p", [element(1,hd(KVPs))]), - {ok, KVPs, State#state{fetchUrl=NextUrl}}; + load_src_after_key( CurKVP, BlkCount, + State#state{ fetchUrl=NextUrl, remCache=[KVPs|RemCache]}); #{<<"value">> := RemVals} -> % may be an empty list KVPs = [remote_kvp(RemVal, Name) || RemVal <- RemVals], ?JTrace("Last fetched contacts : ~p", [length(KVPs)]), - {ok, KVPs, State#state{fetchUrl=finished}} + SortedRemKVPs = lists:keysort(1, lists:append(RemCache, KVPs)), + load_src_after_key( CurKVP, BlkCount, + State#state{ fetchUrl=cached, remCache=SortedRemKVPs}) end. -spec reconnect_exec(#state{}, fun(), list()) -> @@ -498,7 +514,7 @@ reconnect_exec(State, Fun, Args) -> -spec do_cleanup(remKeys(), remKeys(), remKeys(), boolean(), #state{}) -> {ok, #state{}} | {ok, finish, #state{}} . do_cleanup(Deletes, Inserts, Diffs, IsFinished, #state{type=pull} = State) -> - NewState = State#state{cycleBuffer=Deletes++Diffs++Inserts}, + NewState = State#state{clBuffer=Deletes++Diffs++Inserts}, if IsFinished -> %% deposit cleanup batch dirty results in state for sync to pick up @@ -578,7 +594,8 @@ terminate(Reason, _State) -> %% use OAuth2 header with token -spec exec_req(string(), binary()) -> map() | {error, term()}. 
exec_req(Url, Token) -> - AuthHeader = [{"Authorization", "Bearer " ++ binary_to_list(Token)}], + AuthHeader = [ {"Authorization", "Bearer " ++ binary_to_list(Token)} + , {"Prefer","IdType=\"ImmutableId\""}], case httpc:request(get, {Url, AuthHeader}, [], []) of {ok, {{_, 200, "OK"}, _, Result}} -> imem_json:decode(list_to_binary(Result), [return_maps]); @@ -596,7 +613,8 @@ exec_req(Url, Token) -> %% use OAuth2 header with token -spec exec_req(string(), binary(), map(), atom()) -> ok | map() | {error, term()}. exec_req(Url, Token, Body, Method) -> - AuthHeader = [{"Authorization", "Bearer " ++ binary_to_list(Token)}], + AuthHeader = [ {"Authorization", "Bearer " ++ binary_to_list(Token)} + , {"Prefer","IdType=\"ImmutableId\""}], case httpc:request(Method, {Url, AuthHeader, "application/json", imem_json:encode(Body)}, [], diff --git a/src/gen_adapter.erl b/src/gen_adapter.erl index 4580d59e..528f4d1d 100644 --- a/src/gen_adapter.erl +++ b/src/gen_adapter.erl @@ -93,6 +93,7 @@ sql_params(Sql, Types) -> -spec process_cmd({[binary()], [{binary(), list()}]}, atom(), {atom(), pid()}, ddEntityId(), pid(), term()) -> term(). 
process_cmd({[<<"parse_stmt">>], ReqBody}, Adapter, _Sess, _UserId, From, _Priv) -> + %?Info("spawn_process_cmd Adapter=~p ReqBody=~p", [Adapter, ReqBody]), [{<<"parse_stmt">>,BodyJson}] = ReqBody, Sql = string:strip(binary_to_list(proplists:get_value(<<"qstr">>, BodyJson, <<>>))), if @@ -224,7 +225,7 @@ process_cmd({[<<"view_op">>], ReqBody}, _Adapter, Sess, _UserId, From, _Priv) -> [{<<"view_op">>,BodyJson}] = ReqBody, Operation = string:to_lower(binary_to_list(proplists:get_value(<<"operation">>, BodyJson, <<>>))), ViewId = proplists:get_value(<<"view_id">>, BodyJson), - ?Info("view_op ~s for ~p", [Operation, ViewId]), + %?Info("view_op ~s for ~p", [Operation, ViewId]), Res = case Operation of "rename" -> Name = proplists:get_value(<<"newname">>, BodyJson, <<>>), @@ -254,7 +255,7 @@ process_cmd({[<<"update_view">>], ReqBody}, Adapter, Sess, UserId, From, _Priv) TableLay = proplists:get_value(<<"table_layout">>, BodyJson, []), ColumLay = proplists:get_value(<<"column_layout">>, BodyJson, []), ViewId = proplists:get_value(<<"view_id">>, BodyJson), - ?Info("update view ~s with id ~p layout ~p", [Name, ViewId, TableLay]), + %?Info("update view ~s with id ~p layout ~p", [Name, ViewId, TableLay]), ViewState = #viewstate{table_layout=TableLay, column_layout=ColumLay}, if %% System tables can't be overriden. @@ -540,7 +541,8 @@ process_query(Query, Connection, Params, SessPid) -> -spec term_diff(list(), term(), pid(), pid()) -> term(). 
term_diff(BodyJson, Sess, SessPid, From) -> - ?Debug("Term diff called with args: ~p", [BodyJson]), + %?Info("term_diff called with args: ~p", [BodyJson]), + %?Info("term_diff called with Sess ~p SessPid ~p", [Sess, SessPid]), Statement = binary_to_term(base64:decode(proplists:get_value(<<"statement">>, BodyJson, <<>>))), {LeftType, LeftValue} = get_cell_value(proplists:get_value(<<"left">>, BodyJson, 0), Statement), {RightType, RightValue} = get_cell_value(proplists:get_value(<<"right">>, BodyJson, 0), Statement), diff --git a/src/imem_adapter.erl b/src/imem_adapter.erl index bc22b8e5..1d8b4e5c 100644 --- a/src/imem_adapter.erl +++ b/src/imem_adapter.erl @@ -449,10 +449,10 @@ process_cmd({[<<"system_views">>], ReqBody}, Sess, _UserId, From, Priv, SessPid) RespJson = jsx:encode([{<<"error">>, Reason}]); F -> C = dderl_dal:get_command(Sess, F#ddView.cmd), - ?Info("!!! C#ddCmd.command : ~p", [C#ddCmd.command]), - ?Info("!!! Sess : ~p", [Sess]), - ?Info("!!! {ConnId, imem} : ~p", [{ConnId, imem}]), - ?Info("!!! SessPid : ~p", [SessPid]), + %?Info("!!! C#ddCmd.command : ~p", [C#ddCmd.command]), + %?Info("!!! Sess : ~p", [Sess]), + %?Info("!!! {ConnId, imem} : ~p", [{ConnId, imem}]), + %?Info("!!! SessPid : ~p", [SessPid]), Resp = process_query(C#ddCmd.command, Sess, {ConnId, imem}, SessPid), ?Debug("ddViews ~p~n~p", [C#ddCmd.command, Resp]), RespJson = jsx:encode([{<<"system_views">>, @@ -671,6 +671,8 @@ process_cmd({[<<"restore_tables_as">>], BodyJson}, _Sess, _UserId, From, #priv{c Priv; process_cmd({[<<"term_diff">>], ReqBody}, Sess, _UserId, From, Priv, SessPid) -> [{<<"term_diff">>, BodyJson}] = ReqBody, + %?Info("process_cmd Priv=~p ReqBody=~p", [Priv, ReqBody]), + %?Info("process_cmd called with Sess ~p SessPid ~p", [Sess, SessPid]), % Can't be handled directly as SessPid is not given to gen_adapter. 
gen_adapter:term_diff(BodyJson, Sess, SessPid, From), Priv; From 109f1f8d243098709091006ef2f716a6f9015c3d Mon Sep 17 00:00:00 2001 From: shamis Date: Sat, 4 Jul 2020 23:30:15 +0200 Subject: [PATCH 66/72] removed tuple calls --- src/dderl_fsm.erl | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/src/dderl_fsm.erl b/src/dderl_fsm.erl index e67598d9..f2d640f1 100644 --- a/src/dderl_fsm.erl +++ b/src/dderl_fsm.erl @@ -1858,13 +1858,11 @@ sort_distinct_count_rows([_ | XT], [_ | YT]) -> sort_distinct_count_rows(XT, YT) %% -------------------------------------------------------------------- handle_info({StmtRef,{error,Reason}}, SN, State) -> %?Info("dderl_fsm:handle_info from ~p ~p",[StmtRef, {error,Reason}]), - Fsm = {?MODULE,self()}, - Fsm:rows({StmtRef,{error,Reason}}), + dderl_fsm:rows(self(), {StmtRef,{error,Reason}}), {next_state, SN, State}; handle_info({StmtRef,{Rows,Completed}}, SN, State) -> %?Info("dderl_fsm:handle_info from ~p Rows ~p completed ~p",[StmtRef, length(Rows), Completed]), - Fsm = {?MODULE,self()}, - Fsm:rows({StmtRef,Rows,Completed}), + dderl_fsm:rows(self(), {StmtRef,{Rows,Completed}}), {next_state, SN, State}; handle_info(cmd_stack_timeout, SN, #state{stack={button, <<"tail">>, RT}}=State) when SN =:= tailing; SN =:= passthrough -> From 35b2671d4d39e3b3a41cc5be6ea88aa8a9ff37a0 Mon Sep 17 00:00:00 2001 From: shamis Date: Sat, 4 Jul 2020 23:31:56 +0200 Subject: [PATCH 67/72] handling StmtRef when they are different but the status is ok --- src/dderl_fsm.erl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/dderl_fsm.erl b/src/dderl_fsm.erl index f2d640f1..b0852b12 100644 --- a/src/dderl_fsm.erl +++ b/src/dderl_fsm.erl @@ -411,7 +411,7 @@ fetch_close(StmtRef, #state{fetchResults=FetchResults, ctx = #ctx{stmtRefs=StmtR NewFetchResults = [fetch_close_if_open(StmtRef,P,S,F) || {P,S,F} <- lists:zip3(StmtRefs,FetchResults,Fcf)], State#state{pfc=0, fetchResults=NewFetchResults}. 
-fetch_close_if_open(StmtRef, StmtRef, ok, FetchCloseFun) -> FetchCloseFun(), closed; +fetch_close_if_open(_StmtRef1, _StmtRef2, ok, FetchCloseFun) -> FetchCloseFun(), closed; fetch_close_if_open(_StmtRef1, _StmtRef2, S, _FetchCloseFun) -> S. -spec fetch_tailing(pid(), #state{}) -> #state{}. From fa2647b68a746bd528325429653c96b4d78d745a Mon Sep 17 00:00:00 2001 From: stoch Date: Thu, 9 Jul 2020 21:41:11 +0200 Subject: [PATCH 68/72] revert fsm change and implement parentFolderId check for fetch_src --- src/dderl_fsm.erl | 2 +- src/dperl/dperl_strategy_scr.erl | 2 +- src/dperl/jobs/dpjob_office_365.erl | 27 +++++++++++++++++---------- 3 files changed, 19 insertions(+), 12 deletions(-) diff --git a/src/dderl_fsm.erl b/src/dderl_fsm.erl index b0852b12..f2d640f1 100644 --- a/src/dderl_fsm.erl +++ b/src/dderl_fsm.erl @@ -411,7 +411,7 @@ fetch_close(StmtRef, #state{fetchResults=FetchResults, ctx = #ctx{stmtRefs=StmtR NewFetchResults = [fetch_close_if_open(StmtRef,P,S,F) || {P,S,F} <- lists:zip3(StmtRefs,FetchResults,Fcf)], State#state{pfc=0, fetchResults=NewFetchResults}. -fetch_close_if_open(_StmtRef1, _StmtRef2, ok, FetchCloseFun) -> FetchCloseFun(), closed; +fetch_close_if_open(StmtRef, StmtRef, ok, FetchCloseFun) -> FetchCloseFun(), closed; fetch_close_if_open(_StmtRef1, _StmtRef2, S, _FetchCloseFun) -> S. -spec fetch_tailing(pid(), #state{}) -> #state{}. 
diff --git a/src/dperl/dperl_strategy_scr.erl b/src/dperl/dperl_strategy_scr.erl index 07ee3503..8836a0b8 100644 --- a/src/dperl/dperl_strategy_scr.erl +++ b/src/dperl/dperl_strategy_scr.erl @@ -749,7 +749,7 @@ process_events([Key | Keys], Mod, State, ShouldLog, IsEqualFun, IsError) -> {S, D} -> % need to invoke is_equal() case IsEqualFun(Key, S, D) of false -> - ?Info("process_events diff detected key=~p~n~p~n~p",[Key, S, D]), + % ?Info("process_events diff detected key=~p~n~p~n~p",[Key, S, D]), execute_prov_fun("Updated", Mod, update_dst, [Key, S, State], ShouldLog, IsError); true -> Mod:report_status(Key, no_op, State), diff --git a/src/dperl/jobs/dpjob_office_365.erl b/src/dperl/jobs/dpjob_office_365.erl index 73cb811d..59ded57c 100644 --- a/src/dperl/jobs/dpjob_office_365.erl +++ b/src/dperl/jobs/dpjob_office_365.erl @@ -128,6 +128,7 @@ , token :: token() % access token binary , apiUrl :: string() , fetchUrl :: string() + , remFolderId :: remKey() % user's default contact folder id , remCache = [] :: remKVPs() % filled at start of c/r cycle , clBuffer = [] :: remKVPs() | remKVPs() % dirty buffer for one c/r cycle , isConnected = true :: boolean() % fail unauthorized on first use @@ -165,6 +166,8 @@ , get_key_prefix/0 , get_key_prefix/1 , get_local_key/4 + , exec_req/2 %% ToDo: Remove debugging export + , exec_req/4 %% ToDo: Remove debugging export ]). -spec get_auth_config() -> map(). @@ -239,7 +242,7 @@ remote_kvp(#{<<"id">>:=Id} = Value, Name) when is_map(Value) -> % by projecting out syncable attributes(content / meta) -spec local_kvp(remKey(), locVal(), jobName()) -> locKVP(). local_kvp(Id, Value, Name) when is_map(Value) -> - {Id, {maps:with(?CONTENT_ATTRIBUTES(Name), Value), maps:with(?META_ATTRIBUTES(Name), Value)}}. + {Id, {maps:with(?CONTENT_ATTRIBUTES(Name), Value), maps:get(Name, maps:get(<<"META">>, Value, #{}),#{})}}. -spec connect_check_src(#state{}) -> {ok,#state{}} | {error,any()} | {error,any(), #state{}}. 
connect_check_src(#state{isConnected=true} = State) -> @@ -289,7 +292,7 @@ do_refresh(_State, _BulkSize) -> {error, cleanup_only}. % using cleanup/refresh -spec fetch_src(remKey(), #state{}) -> ?NOT_FOUND | {remVal(), remMeta()}. % fetch_src(Key, #state{channel=Channel, type=push}) -> % dperl_dal:read_channel(Channel, Key); -fetch_src(Id, #state{name=Name, type=pull, apiUrl=ApiUrl, token=Token} = State) -> +fetch_src(Id, #state{name=Name, type=pull, apiUrl=ApiUrl, token=Token, remFolderId=RemFolderId} = State) -> ContactUrl = ApiUrl ++ binary_to_list(Id), ?JTrace("Fetching contact with url : ~s", [ContactUrl]), %?Info("Fetching contact with url : ~s", [ContactUrl]), @@ -297,9 +300,11 @@ fetch_src(Id, #state{name=Name, type=pull, apiUrl=ApiUrl, token=Token} = State) {error, unauthorized} -> reconnect_exec(State, fetch_src, [Id]); {error, ?NOT_FOUND} -> ?NOT_FOUND; {error, Error} -> {error, Error, State}; - #{<<"id">> := _} = RemVal -> - %?Info("Fetched contact ~p ", [RemVal]), - remote_kvp(RemVal, Name); + #{<<"id">>:=_} = RemVal when RemFolderId == undefined -> + ?Info("fetch_src unexpected RemFolderId in state"), + remote_kvp(RemVal, Name); + #{<<"id">>:=_, <<"parentFolderId">>:=RemFolderId} = RemVal -> + remote_kvp(RemVal, Name); _ -> ?NOT_FOUND end. 
@@ -491,15 +496,17 @@ load_src_after_key(CurKVP, BlkCount, #state{ name=Name, type=pull, token=Token {error, Error} -> {error, Error, State}; #{<<"@odata.nextLink">> := NextUrl, <<"value">> := RemVals} -> KVPs = [remote_kvp(RemVal, Name) || RemVal <- RemVals], - ?JTrace("Fetched contacts : ~p", [length(KVPs)]), + #{<<"parentFolderId">>:=FolderId} = hd(RemVals), + ?JTrace("Fetched contacts : ~p for folder ~p", [length(KVPs), FolderId]), load_src_after_key( CurKVP, BlkCount, - State#state{ fetchUrl=NextUrl, remCache=[KVPs|RemCache]}); + State#state{remFolderId=FolderId, fetchUrl=NextUrl, remCache=[KVPs|RemCache]}); #{<<"value">> := RemVals} -> % may be an empty list KVPs = [remote_kvp(RemVal, Name) || RemVal <- RemVals], - ?JTrace("Last fetched contacts : ~p", [length(KVPs)]), + #{<<"parentFolderId">>:=FolderId} = hd(RemVals), + ?JTrace("Last fetched contacts : ~p for folder ~p", [length(KVPs), FolderId]), SortedRemKVPs = lists:keysort(1, lists:append(RemCache, KVPs)), load_src_after_key( CurKVP, BlkCount, - State#state{ fetchUrl=cached, remCache=SortedRemKVPs}) + State#state{remFolderId=FolderId, fetchUrl=cached, remCache=SortedRemKVPs}) end. 
-spec reconnect_exec(#state{}, fun(), list()) -> @@ -626,7 +633,7 @@ exec_req(Url, Token, Body, Method) -> % update/patch result imem_json:decode(list_to_binary(Result), [return_maps]); {ok, {{_, 204, _}, _, _}} -> - % delete result + % delete or put result ok; {ok, {{_, 401, _}, _, Error}} -> ?JError("Unauthorized body : ~s", [Error]), From 276e0175282abf1c2a8896508c17ee2117a517e4 Mon Sep 17 00:00:00 2001 From: shamis Date: Sat, 11 Jul 2020 14:44:53 +0200 Subject: [PATCH 69/72] using the correct pid if rows fun --- src/dderl_diff.erl | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/src/dderl_diff.erl b/src/dderl_diff.erl index 11c76734..c757b264 100644 --- a/src/dderl_diff.erl +++ b/src/dderl_diff.erl @@ -44,8 +44,9 @@ term_diff(Sess, SessPid, LeftType, LeftValue, RightType, RightValue) -> get_fsmctx(Result) -> % <> = crypto:strong_rand_bytes(4), RowCols = get_columns(), + Pid = self(), FullMap = build_full_map(RowCols), - #fsmctxs{stmtRefs = [self()] + #fsmctxs{stmtRefs = [Pid] ,stmtTables = [<<"term_diff">>] ,rowCols = RowCols ,rowFun = get_rowfun() @@ -58,7 +59,7 @@ get_fsmctx(Result) -> [fun(_Opts, _Count) -> Rows = [{{}, {RowId, Left, Cmp, Right}} || {ddTermDiff, RowId, Left, Cmp, Right} <- Result], % This seems hackish but we don't want to keep a process here. 
- dderl_fsm:rows(self(), {self(), {Rows, true}}) + dderl_fsm:rows(self(), {Pid, {Rows, true}}) end] ,fetch_close_funs = [fun() -> ok end] ,stmt_close_funs = [fun() -> ok end] From 315b15e0350b8c5a91ee3f350fcc1af46470ce19 Mon Sep 17 00:00:00 2001 From: stoch Date: Thu, 16 Jul 2020 22:55:22 +0200 Subject: [PATCH 70/72] fixes for write_no_audit --- src/dperl/dperl_dal.erl | 2 +- src/dperl/jobs/dpjob_office_365.erl | 11 ++++++----- 2 files changed, 7 insertions(+), 6 deletions(-) diff --git a/src/dperl/dperl_dal.erl b/src/dperl/dperl_dal.erl index 8bc01da8..98ad560b 100644 --- a/src/dperl/dperl_dal.erl +++ b/src/dperl/dperl_dal.erl @@ -122,7 +122,7 @@ write_channel(Channel, Key, Val) when is_map(Val); byte_size(Val) > 0 -> -spec write_channel_no_audit(binary(), any(), any()) -> ok | {error, any()}. write_channel_no_audit(Channel, Key, Val) when is_map(Val); byte_size(Val) > 0 -> - case catch imem_dal_skvh:write(system, Channel, Key, Val) of + case catch imem_dal_skvh:write_no_audit(system, Channel, Key, Val) of Res when is_map(Res) -> ok; {'EXIT', Error} -> {error, Error}; {error, Error} -> {error, Error}; diff --git a/src/dperl/jobs/dpjob_office_365.erl b/src/dperl/jobs/dpjob_office_365.erl index 59ded57c..c6a7bf02 100644 --- a/src/dperl/jobs/dpjob_office_365.erl +++ b/src/dperl/jobs/dpjob_office_365.erl @@ -381,15 +381,16 @@ update_dst(Id, {Id, {Value,Meta}}, #state{ name=Name, channel=Channel, keyPrefix ?NOT_FOUND -> ?JError("update_dst key ~p not found for remote id ~p", [Key, Id]), {true, State}; - #{<<"META">> := OldMeta} = OldVal -> + #{<<"META">> := OldMeta} = OldValMeta -> NewMeta = maps:merge(OldMeta, #{Name => Meta}), - AllVal = maps:merge(Template, OldVal), + AllVal = maps:merge(Template, OldValMeta), NewVal = maps:merge(AllVal, Value), - case update_local(Channel, Key, OldVal, NewVal, NewMeta) of + case update_local(Channel, Key, maps:without([<<"META">>], OldValMeta), + maps:without([<<"META">>], NewVal), NewMeta) of ok -> {false, State}; - {error, 
_Error} -> - ?JError("update_dst cannot update key ~p to ~p", [Key, NewVal]), + {error, Error} -> + ?JError("update_dst cannot update key ~p to ~p with error ~p", [Key, NewVal, Error]), {true, State} end end. From 86cf7c391c48270ea8066d6a23b3c4fecb5b59da Mon Sep 17 00:00:00 2001 From: stoch Date: Tue, 28 Jul 2020 18:44:26 +0200 Subject: [PATCH 71/72] fix typo in logging --- src/dderl_oauth.erl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/dderl_oauth.erl b/src/dderl_oauth.erl index 4424cb38..88e7c93c 100644 --- a/src/dderl_oauth.erl +++ b/src/dderl_oauth.erl @@ -38,7 +38,7 @@ get_access_token(AccountId, TokenPrefix, Code, SyncMod) -> SyncMod:get_auth_config() % ToDo: AuthConfig may depend on JobName or TokenPrefix catch _:E:S -> - ?Error("Finding AuthConfig : ~p ñ~p", [E,S]), + ?Error("Finding AuthConfig : ~p~n~p", [E,S]), {error, E} end, %?Info("get_access_token AuthConfig: ~p",[AuthConfig]), From 822d9dbf82640c210a4a8e03eb9960fce66e31d3 Mon Sep 17 00:00:00 2001 From: shamis Date: Mon, 5 Jul 2021 20:27:41 +0200 Subject: [PATCH 72/72] dperl_graph2 merged with master resolved conflicts --- .gitignore | 1 + BUILD.md | 205 ++++++ LICENSE | 258 ++----- README.md | 34 +- config/shell.config | 8 + docker-compose.yml | 30 + priv/.BUILD_images/Connect.png | Bin 0 -> 34016 bytes priv/.BUILD_images/Login.png | Bin 0 -> 17361 bytes priv/.BUILD_images/Result.png | Bin 0 -> 24026 bytes priv/.BUILD_images/compose_up.png | Bin 0 -> 8136 bytes priv/.BUILD_images/docker_exec.png | Bin 0 -> 12758 bytes priv/.BUILD_images/git_clone.png | Bin 0 -> 21588 bytes priv/.BUILD_images/ping.png | Bin 0 -> 16848 bytes priv/.BUILD_images/rebar3_shell_end.png | Bin 0 -> 34875 bytes priv/.BUILD_images/rebar3_shell_start.png | Bin 0 -> 88661 bytes priv/.BUILD_images/sqlplus_1.png | Bin 0 -> 13172 bytes priv/.BUILD_images/sqlplus_2.png | Bin 0 -> 21840 bytes priv/.BUILD_images/start_oracle_db.png | Bin 0 -> 26713 bytes priv/.BUILD_images/yarn_end.png | Bin 0 -> 62231 bytes 
priv/.BUILD_images/yarn_start.png | Bin 0 -> 43776 bytes priv/dev/static/index.html | 1 + priv/dev/static/index.js | 3 +- priv/dev/static/scripts/dderl.js | 22 +- priv/dev/static/scripts/login.js | 133 +++- rebar.config | 18 +- src/dderl.app.src | 1 + src/dderl.erl | 18 +- src/dderl.hrl | 21 + src/dderl_dal.erl | 63 ++ src/dderl_resource.erl | 2 +- src/dderl_saml_handler.erl | 3 +- src/dderl_session.erl | 35 +- src/dderlodpi.erl | 44 +- src/dperl/README.md | 190 ++++++ src/dperl/jobs/dperl_ouraring_crawl.erl | 350 ++++++++++ src/dperl/services/dperl_service_oracle.erl | 703 ++++++++++++++++++++ src/dperl/services/dperl_service_oracle.hrl | 215 ++++++ test.escript | 28 + 38 files changed, 2134 insertions(+), 252 deletions(-) create mode 100644 BUILD.md create mode 100644 docker-compose.yml create mode 100644 priv/.BUILD_images/Connect.png create mode 100644 priv/.BUILD_images/Login.png create mode 100644 priv/.BUILD_images/Result.png create mode 100644 priv/.BUILD_images/compose_up.png create mode 100644 priv/.BUILD_images/docker_exec.png create mode 100644 priv/.BUILD_images/git_clone.png create mode 100644 priv/.BUILD_images/ping.png create mode 100644 priv/.BUILD_images/rebar3_shell_end.png create mode 100644 priv/.BUILD_images/rebar3_shell_start.png create mode 100644 priv/.BUILD_images/sqlplus_1.png create mode 100644 priv/.BUILD_images/sqlplus_2.png create mode 100644 priv/.BUILD_images/start_oracle_db.png create mode 100644 priv/.BUILD_images/yarn_end.png create mode 100644 priv/.BUILD_images/yarn_start.png create mode 100644 src/dperl/README.md create mode 100644 src/dperl/jobs/dperl_ouraring_crawl.erl create mode 100644 src/dperl/services/dperl_service_oracle.erl create mode 100644 src/dperl/services/dperl_service_oracle.hrl create mode 100644 test.escript diff --git a/.gitignore b/.gitignore index 90c06f7d..399e2535 100644 --- a/.gitignore +++ b/.gitignore @@ -23,3 +23,4 @@ node_modules/ priv/swagger/yarn.* .rebar3/ /dderl.dderl1@127.0.0.1/ +.idea diff 
--git a/BUILD.md b/BUILD.md new file mode 100644 index 00000000..a87e3691 --- /dev/null +++ b/BUILD.md @@ -0,0 +1,205 @@ +Buidling DDErl. +===== +`` +## Table of Contents + +**[1. Prerequisites](#prerequisites)**
    +**[2. Building DDErl](#buildinf_dderl)**
    +**[2.1 Building On Operating System Level](#building_on_operating_system_level)**
    +**[2.2 Building Using Docker Containers](#building_using_docker_containers)**
    + +---- + +## 1. Prerequisites + +Building DDErl is only supported for Unix and similar systems. +All instructions have been tested with Ubuntu 20.04 (Focal Fossa). + +The following software components are required in addition to a Unix operating system: + +| Component | From Version | Source | +| --------- | ------------- | ---------------------------------------------------| +| Erlang | 23.3.1 | https://erlang.org/download/otp_versions_tree.html | +| gcc | 10.2.0 | https://gcc.gnu.org/ | +| git | 2.31.1 | https://git-scm.com/ | +| GNU make | 4.3 | https://www.gnu.org/software/make/ | +| rebar3 | V3.14.3 | https://www.rebar3.org/ | +| Yarn | 1.22.10 | https://github.com/yarnpkg/yarn | + +## 2. Building DDErl + +The build process can either be done directly on the operating system level or based on the DDErl development image. +For the former, all the software components mentioned under section 1 must be installed, for the latter they are already pre-installed in the image. +In addition, a Docker compose script is available that combines the DDErl development image with an empty Oracle database. +This can be used as an easily customizable template. + +## 2.1 Building On Operating System Level + +### 2.1.1 Download the DDErl repository from GitHub + + git clone https://github.com/KonnexionsGmbH/dderl + +### 2.1.2 Change to the DDErl directory + + cd dderl + +### 2.1.3 Create the frontend to `DDErl` + + cd priv/dev + yarn install-build-prod + +### 2.1.4 Build alternatively + +#### Either backend and frontend: + + rebar3 as ui compile + +#### or backend only: + + rebar3 compile + +#### or frontend only: + + bash ./build_fe.sh + +## 2.2 Building Using Docker Containers + +The use of DDErl development image makes the build process independent of the host operating system. +The only requirement is the installation of Docker Desktop and possibly Docker Compose (Unix operating systems). +The following instructions demonstrate how to use the Docker compose script. 
+ +### 2.2.1. Building DDErl with Docker Compose in the DDErl root directory + +This command installs an executable DDErl: + + docker-compose up -d + +**Sample output:** + +![](priv/.BUILD_images/compose_up.png) + +The following processing steps are performed: +1. If not already there, download the Oracle database image and create the container `dderl_db_ora` with an Oracle database (currently 19c). +2. If not yet available, download the Konnexion development image and create the corresponding container `dderl_dev`. +3. Both containers are assigned to network `dderl_default`. +4. After the database is ready, the schema `scott` is created with the password `regit` (only with a new database container). +5. The repository `https://github.com/KonnexionsGmbH/dderl` is downloaded from Github (only with a new development container). +6. The frontend to `DDErl` is created (only with a new development container). +7. `DDErl` is compiled and started. + +### 2.2.2. Building DDErl manually + +#### 2.2.2.1 Start the Oracle database container: + +##### If the network is not yet existing: + + docker network create dderl_default + +##### If the oracle database container is not yet existing: + + docker create --shm-size 1G --name dderl_db_ora --network dderl_default -p 1521:1521/tcp -e ORACLE_PWD=oracle konnexionsgmbh/db_19_3_ee + +##### Start the Oracle database container: + + docker start dderl_db_ora + +**Sample output:** + +![](priv/.BUILD_images/start_oracle_db.png) + +#### 2.2.2.2 Start and enter the DDErl development container + +##### If the DDErl development container is not yet existing: + + docker create --name dderl_dev --network dderl_default -p 8443:8443 -t konnexionsgmbh/dderl_dev:latest + +##### Start the DDErl development container: + + docker start dderl_dev + +##### Enter the DDErl development container: + + docker exec -it dderl_dev bash + +**Sample output:** + +![](priv/.BUILD_images/docker_exec.png) + +Inside the development container `dderl_dev` the database 
container `dderl_db_ora` can be addressed with the `dderl_db_ora` as hostname: + + ping dderl_db_ora + +![](priv/.BUILD_images/ping.png) + +#### 2.2.2.3 Optionally the database can be set up + + sqlplus sys/oracle@dderl_db_ora:1521/orclpdb1 as sysdba + +![](priv/.BUILD_images/sqlplus_1.png) + + create user scott identified by regit; + grant alter system to scott; + grant create session to scott; + grant unlimited tablespace to scott; + grant create table to scott; + grant create view to scott; + exit + +![](priv/.BUILD_images/sqlplus_2.png) + +#### 2.2.2.4 Next you need to download the DDErl repository from GitHub + + git clone https://github.com/KonnexionsGmbH/dderl + cd dderl + +**Sample output:** + +![](priv/.BUILD_images/git_clone.png) + +#### 2.2.2.5 Then the frontend to `DDErl` has to be created + + ./build_fe.sh + +**Sample output - start:** + +![](priv/.BUILD_images/yarn_start.png) + +**Sample output - end:** + +![](priv/.BUILD_images/yarn_end.png) + +#### 2.2.2.6 Now you can either execute one of the commands from section 2.1 point 4 or start DDErl directly with `rebar3 shell` + + rebar3 shell + +**Sample output - start:** + +![](priv/.BUILD_images/rebar3_shell_start.png) + +**Sample output - end:** + +![](priv/.BUILD_images/rebar3_shell_end.png) + +### 2.2.3 Finally DDErl is ready and can be operated via a Browser + +#### Login screen: + +![](priv/.BUILD_images/Login.png) + +User: `system` Password: `change_on_install` + +#### Database connection: + +| | | +| --- | --- | +| Service | **`orclpdb1`** | +| Host / IP | **`ddeerl_db_ora`** | +| Port | **`1521`** | +| User | **`scott`** | +| Password | **`regit`** | + +![](priv/.BUILD_images/Connect.png) + +##### Start browsing: + +![](priv/.BUILD_images/Result.png) diff --git a/LICENSE b/LICENSE index 8dada3ed..8dbc22c2 100644 --- a/LICENSE +++ b/LICENSE @@ -1,201 +1,65 @@ - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ + Konnexions Public License (KX-PL) + Version 2020.05, May 
2020 +https://github.com/KonnexionsGmbH/shared_resources/blob/master/License/KX-PL-2020.05.pdf - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + This license governs use of the accompanying software. If you use the software, you + accept this license. If you do not accept the license, do not use the software. 1. Definitions. - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). 
- - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. 
Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative 
Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. 
Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. 
- - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "{}" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright {yyyy} {name of copyright owner} - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. + The terms "reproduce", "reproduction", "derivative works", and "distribution" + have the same meaning here as under U.S. copyright law. + + A "contribution" is the original software, or any additions or changes to the + software. + + A "contributor" is any person that distributes its contribution under this + license. + + "Licensed patents" are a contributor's patent claims that read directly on its + contribution. + + 2. Grant of Rights + + (a) Copyright Grant- Subject to the terms of this license, including the license + conditions and limitations in section 3, each contributor grants you a non- + exclusive, worldwide, royalty-free copyright license to reproduce its + contribution, prepare derivative works of its contribution, and distribute + its contribution or any derivative works that you create. 
+ + (b) Patent Grant- Subject to the terms of this license, including the license + conditions and limitations in section 3, each contributor grants you a non- + exclusive, worldwide, royalty-free license under its licensed patents to + make, have made, use, sell, offer for sale, import, and/or otherwise dispose + of its contribution in the software or derivative works of the contribution + in the software. + + 3. Conditions and Limitations + + (a) No Trademark License- This license does not grant you rights to use any + contributors' name, logo, or trademarks. + + (b) If you bring a patent claim against any contributor over patents that you + claim are infringed by the software, your patent license from such + contributor to the software ends automatically.0 + + (c) If you distribute any portion of the software, you must retain all copyright, + patent, trademark, and attribution notices that are present in the software. + + (d) If you distribute any portion of the software in source code form, you may do + so only under this license by including a complete copy of this license with + your distribution. If you distribute any portion of the software in compiled + or object code form, you may only do so under a license that complies with + this license. + + (e) The software is licensed "as-is." You bear the risk of using it. The + contributors give no express warranties, guarantees or conditions. You may + have additional consumer rights under your local laws which this license + cannot change. To the extent permitted under your local laws, the + contributors exclude the implied warranties of merchantability, fitness for a + particular purpose and non-infringement. + + (f) Source code usage under this License is limited to review, compilation and + contributions. Contributions to Konnexions software products under this + License may only be made in consultation with Konnexions GmbH and through the + appropriate Konnexions software repositories. 
diff --git a/README.md b/README.md index 959bd025..d5e25f84 100644 --- a/README.md +++ b/README.md @@ -1,14 +1,23 @@ DDErl: Data Discovery Tool. ===== -### Build -1. Build backend and frontend `rebar3 as ui compile` -1. Build backend only `rebar3 compile` -1. Build frontend only `bash ./build_fe.sh` -### Start Console -1. `rebar3 shell` or `ESCRIPT_EMULATOR=werl rebar3 shell` (for GUI in windows) or `ERL_FLAGS="-proto_dist imem_inet_tcp" rebar3 shell` (to start with imem_inet_tcp as proto_dist) -1. go to https://127.0.0.1:8443/dderl in your browser -### Features +![Travis (.com)](https://img.shields.io/travis/com/KonnexionsGmbH/dderl.svg?branch=master) +![GitHub release](https://img.shields.io/github/release/KonnexionsGmbH/dderl.svg) +![GitHub Release Date](https://img.shields.io/github/release-date/KonnexionsGmbH/dderl.svg) +![GitHub commits since latest release](https://img.shields.io/github/commits-since/KonnexionsGmbH/dderl/3.9.7.svg) +---- + +#### Table of Contents + +**[Features](#features)**
    +**[Start Console](#start_console)**
    +**[Certificates](#certificates)**
    +**[Building DDErl](BUILD.md)**
    + +---- + +## Features + 1. Browse mnesia and oracle tables in the browser 1. Add and update data 1. Import and Export data @@ -28,7 +37,13 @@ DDErl: Data Discovery Tool. 1. Connect to other imem server over TCP with SSL 1. CSV file parsing -### Certificates +## Start Console + +1. `rebar3 shell` or `ESCRIPT_EMULATOR=werl rebar3 shell` (for GUI in windows) or `ERL_FLAGS="-proto_dist imem_inet_tcp" rebar3 shell` (to start with imem_inet_tcp as proto_dist) +1. go to https://127.0.0.1:8443/dderl in your browser + +## Certificates + DDErl runs on SSL. A default certificate/key pair is [supplied](https://bitbucket.org/konnexions/dderl/src/master/priv/certs/). This, however can be changed either by replacing these files at installation or modifying configuration in `ddConfig` table (`[{dderl,dderl,dderlSslOpts}]`). A sample configuration is given below: ```erlang [{cert,<<48,...,107>>}, @@ -47,4 +62,3 @@ To convert a PEM crt/key files to DER (accepted by erlang SSL binary certificate > public_key:pem_decode(PemKey). 
[{'RSAPrivateKey',<<48,130,2,92,2,1,0,2,129,129,0,160,95,...>>,not_encrypted}] ``` - diff --git a/config/shell.config b/config/shell.config index 5b37d66b..9c672dc0 100644 --- a/config/shell.config +++ b/config/shell.config @@ -16,6 +16,14 @@ {tcp_port, 1236} ]}, +%% WAX config + {wax, [ + {rp_id, auto}, + {metadata_update_interval, 3600}, + {metadata_access_token, <<"a21179872212686e4b126a7448085e796cbf1eaf1ec5ca7e">>}, + {allowed_attestation_types, [basic, uncertain, attca, self]} + ]}, + {lager, [ {handlers, [ {lager_console_backend, [ diff --git a/docker-compose.yml b/docker-compose.yml new file mode 100644 index 00000000..bd348600 --- /dev/null +++ b/docker-compose.yml @@ -0,0 +1,30 @@ +version: '3.8' + +services: + db: + container_name: dderl_db_ora + environment: + ORACLE_PWD: oracle + image: konnexionsgmbh/db_19_3_ee:latest + ports: + - "1521:1521" + + dev: + command: /bin/bash -c " + while [ `docker inspect -f {{.State.Health.Status}} dderl_db_ora` != 'healthy' ]; + do docker ps --filter 'name=dderl_db_ora'; + sleep 30; + done + && sqlplus sys/oracle@dderl_db_ora:1521/orclpdb1 as sysdba @/usr/opt/dderl/run_db_setup.sql + && ./usr/opt/dderl/run_application_setup.sh + " + container_name: dderl_dev + depends_on: + - db + image: konnexionsgmbh/dderl_dev:latest + ports: + - "8443:8443" + tty: true + volumes: + - /var/run/docker.sock:/var/run/docker.sock +# - ./Transfer:/Transfer # Example: (create and) use the directory `Transfer` diff --git a/priv/.BUILD_images/Connect.png b/priv/.BUILD_images/Connect.png new file mode 100644 index 0000000000000000000000000000000000000000..e92c3ced143073f499b1674ae1596abf73995c73 GIT binary patch literal 34016 zcmbrlbyOTr6g8OO!F_NI!CeCb1Sd#<;BFy62<|e2yM+XI2@>2L28ZD8HV~X)aEDob z`|a7Y-`PETzCHWL^mJF9>bkGH>eYR>Uqz~aQozBazp{no zFRxyOxPO$D((*EaWxX-d(ta59AKpTGn@jg@3n{-SDU~(Jm-II*VcD*;(4x$KK12O! 
z1?&67s1K37sX%?xXJr2VS!w>iJz8c)8gHa|K3HiW_F7y|vc!5vdAeR915G6rPTudW zi|u9mj^Bw-y!YH~eD6|z%t01rP9{I8^KA9s2`TE!VE`o>sh0=4pgX^d2fNp%n&sul zL-BZ-fUKpHm0*b7>(yMxx%AUq!zqBMnC%~FZ~fO8i`*!rXqk}^x$f6-DcOmJ*fT!1 z$ih2SZTQH9D5RmuZ=!>c-7bz;n#hW-YF#&YY@1! zn0+nJCN9zDsad{QNzC!HU23!qz;AwU8ybAEu4SP|o*Qx*!%FqGE2Q>v>PLszPbAQC ze62%1YEh5Ac>l*hL}3wwq$I6w99f`UK|a3gg7NS(_KcC4hRxmVKC50YY|`B8IXlur z(q3b0Ck0obA2ryiRl$>K{bw~ZrY#&&g&`e^A9fyv*o+ljKOtrXabK5?o% z^zvYag@xtA4<3{6&X@ZdC}?OsV`I0Pq>Ux+xFI?fK>lQx)G0Pi@ggG?DL3Mpjb`ny(vSc0dL`k{a)( zuls(0DR?6gmDa!fpRu?H$2((hCN?&&^kJ<`X5!;Foy)KdU(>;OYCI^d+L^Vm3{oU zzfcA+0bgpMU`zy>Q$(8sOpR2@_PP66v(BlzYyD^6&E9z7#uC*ZFp_}Yp-xEhHON`z z``xOyXlH#Y;Ob3_qfUf1GJnb!une!YkKd$0KkL64{4%OIRds|9{F)il!Zln_9MmBB z6mrE-4_NOqMm!7qL{*IL$CYW+$@N#!NF}&&Q7wuPdsU` z|FRD8Uy)O0opy>Kg6{V`lmi}|T~||ds*o5t41iviMdCTh}AP4snjW-Z*Hq}NQHA3Zhi z#TTNd}An_uy-f|e%OmpBT+t$-=8`#)k`oIH%SdC9uYgQ-t^epUYIn}{t z#P4oW!*~3L>m?WHJ;ymQbUzgqMZO00>iJ!*i867Jci>(u5D&X@3>pc-8!}E|%p1^D z7x3Dd&UV~?Bj?1Tni;&Z=%u{hyP=6yIkRI9%}x} zGn}X~{gdf-q8E?dpL2oiThISA%nY~G!3R7$UrU?elIr%ff?60y&5xy$Wj)Nt7N@FzLtB11Te+zW6Z5VXgCcYEg_wMbpWQalvtNtQCR<~3q+zcCONs!-M;G6_*m>{Tp6WyY7kH0d}kVSQyRxQLW)tQHS zbS|T`4Lxobe=>Jd#CUdJvQqLp8;D&@PeFHlSwMNye+Nq&(lgwf$!`jJ9l)?v5~KU2 zB+)a9B93?HUoCJyV*=KEYceGW&3Bthc67;7NwGn^WApiZ{d4_$z^SixVax3pv)5L% z&H^ZC9u4W^-3>w4`~c!9MB(0%_Wp=npr<6tO|L@IZ6s7C;pW)V>FB$_fm%uuv}=ZY zYmwQM%{l&LFW>ZB!DeIRI$r>!X8!bx8ySzutem#r)BNu*bm!hW4fXssz*Z6Mpv4{a z09=B&B39v;awXB;VvBvlQr_Qrbnb}PVn-Z9_w(4cKHNC))-e%VrTrSMJm|nLZkk__ zHTFLwm#(Dz^07S^DW-8fQJ5-Emzs~A{*ny$m)+JgMXZvko(8jjMCX1NBQ7RJIKym3 zGVfw{sr410C-d`U|C-X%l0)BN`Vi2~HnS1Z_&-K=4d%>+K)r70j@!PF#bHI?+%8|0 zjILJv{zZ2QhO6Gn8s+w7h;_8;r;Yr)_43ZU?0cg^MAL(%YZB=~|7r>G7;H$&J>X2A z$UhL*kBFs-<%53w!BR4#%Ep1I;=0`=uAoP3AR+`v*Piw2?|L3GJjGOEHVLd-r=B`+ zqCsQ!4~~dWfj-J85Uh7la-~X;$aLP<_hU$_-o8=m?wui8hpn*B$Q1Tyia?YBqft>o zDflr~kjEf5=ugkf)5*@67cROztaU-GB{c~|A88IK`n7ER9Ep1{(9e3$`CBu1Tyvh* zfTCjp#?eF;_IDAKDjrJuDPt9){EZW{B2r?lZHP0>yb(Sw7#T#2R%Dg8rr~}cVNMoi 
z+wns1SSFUS49be}DX8dPTHm=Z&|kk;Gbh`9?q~-Ht)_XLGYivLy55r_{qLqSs0fyP zvjh6f_n_t5rle@;jUSc&aZghZnis+dwaL%*Mk^K3i|9GvAMmnc1csObdwAFkJx{Fv ztDRA}tWfAlE}k@y%QKyD_^%;n6iYoX(F>|6c`r2ig@FHHT~&|r|Hr%k2O~!Nf1>37 zt5r||jm0`lo~R0OBFN8K>J*bH6qhCW+g9M!&So7)d_&2|r`LuTI~rHkQ(OIdXLAU3 zZSBIowY4?pZJY(m`O!3wi$l!F9#nu4oh5c}J*Fb@VeiSB@3@I)Z>;qO2riyB?e&&QWDvXfw%f1l|)gOb@baj zDv}+@zvpd)$2Ijc=lJEriV-S?k7t9l)^G>{OkOwcX)Br20sphmU>2>0Xxcs2tm01Q zb;XR1V;^+5gQ=x`erjKOaJ7nipXdO0WsM(qjJ~6Nn*nmLS0Ay*VUx_t_><nw2AriHl4!fAW1j$%1^??cgHFuBxHz+(Zs|g)%n4Gf4PDE{QUe&=>;8k zu4eHkJ~S)CE9~Q)7KUiad+ap-lL4-&Fq<}Bubm{_|KcTb{Z%BqYI}o4maeHuP>nTpP|duoURN;U~LXrA20aPi&)P#<1#JB|GC9ud~#B~ z(62G~BHRD@j_+$$CUSU<4@fi{?1Z)}`9$|FCbn zft&}6QY^vcaZRn4n35~5@E}+`Sx)8Ldp@=}fueh+B z2KWyb8u6(DRpf7a6DPx_c6;w?@+RDw6gl2Btj2&2-(prWV6eq9VM~@9hL^Bcg~Z1< zsqhZ2tQhLc+)C_-af%QbFsAZd=@=1!k_c@wroRol3KE)vuy%Gxns4IBm2zQy3+q2y zaFB7f*mWxtYg#X$;k4;EyFM=iH4l5;r`7!^tU#d*YyI)=Vr0fKAVAa-`@J1hF zJdnY1*YEo3Mv2a5a)cDQRaDP$?aaENI|pq@IzjS#^;O`5rgNpCW5iG1qu<{Zg^6gB zD#Sdrm6_Fco{Klsk#-EFt#mKujwWRWOYVqg?Ft~ox)&AHWzy@)UqbBOB(ZBUYk2Lp z?D05len`PbAt4Og+%)@QWtEqo|G7vs1sNTW!6-saQ!093qvBDAh-z5}@DsMvLP+Vq z9ON$P@VOBrmQ{dau@o($ht zS?XJDdi69BXNKqdBX06CaRIWK*#uF1Yq(^Lzp-cNsG#(WgeK?ofA5Q9+vRXo)-_2T384l$s(_?J@G z_r}^x<1kjsSUhoax4F3moAp=rng*)gmC{yLOeTIis^n$3J5v&>Z^qVc_NRIeFY#58 zr`&$9O9khN(*2xM2`&;H!Ue1ch(-{7QJwLF6o#rz=BgbLG@|K>%8PSL?P*td1T|ts z$Jqr*Ba7mk-e2fvXGo~Qld#*I0+#CMJZ%rY?iBh-BU+{k!bdod)Uy`T*(}RnUJ#l8 zod}T68rQ@dCYXYcMi?@hC6MIi3y`ks-_QOQ=RjKkCW*XzlbJaZqW8KI>&B0H zG30suRwJlim&32^RcxGav9YyejUb!l3dXXHNosl-Yzzqz5fMLliIO$tw9u65GC!B% zeEG9tadn?VGGa_^WJ?i=rnhWzbK&vgNM`+(Q;Hmp$L5amihaKsn9UpBHdeQzSq&@t zuzYiJ>rQ=n)Ezu=8rB+g=>}vJn)nePg$Fltwg=Dd0M`}z1`!A?y`%+G&k7Z1+_AfZ zf)o>9tm5GjtI;9!94_r3n*RavER}|_A-qBEOArlbAuDsxJcZ5cv}p`N!Y2w8Sn=*u zx}1?wX00j^@p!Q;8upwiC>7i&>h!Dd6LL$GXsAqZz9@GU+BxPdYo2ooHt$Hm2O2b- zA%Pv4VD>RY*`)WC8hHX>da3GH+FT`W-~F}?fgzqZ<-NeNE;p6HQz0aK*}`u)sv~6i zGxv)bN@Ns<_!##zJ^nOcvK+r_@^zEjPYqIWw_Hrx-h4-4N0q&(fQ~}CS{x{cqxqQwK0V%{^h@a 
zWT(?*Rrw_)7{O?`TEidWdrp?>(8J@Yx@Y~@g8Ws}^4+d?W9q=WTVaMpjLNfVOdUQH z=mAgYqs2xMub+An9zXt!;jP5fzrL&|@&;6*cyqtina}=dal9>{LAO76N+E-f;d5M{ zm(Aybl3ryBF-PX(1IuVQ6STVHNZZ)Z%Hq<-jc19CbfgQ96NdWIq(|dqfRlL#$(;Z4 zJbHG*9B=t5U#lA5J={01GUmM15_sxAdeboYC%>zn;}-qwMf&_dB-L5pytMx~-uwSU zTm4@pa{sd;`@h;s<0$2LQhD5eTr8;WwXsuCtx~4cSHt zkg{gh+noiNUgwtGV=YYgAB{x$Z&XisTy(feNQOf@F2Gbk_M@G>5wEQ%W*kXW*b16; zzofK%Pw_;il;ItNAIVPt?bgPUkTt<3JRY6`qH`t7hxh5%#>t9*{^9=dsstB1@oNRG zCeUujmv7!>=>3mqHOL*!0eKpWxVO^p%uvs8+=v*mkUtA1fz{dF1Js5r4WyxJLQfI4 zV&K=J9w_6PMalG^tt|62r0F#KLY7`4;@yz7$iL^C$##_K!<~ z#bL~V;T#%lrK$IKD&^UJFJ8WJn}?E#9kj=JVH%%%h1$IQS83_Y{=e6E3IQNYqlrW48{AkS`5~YR0*6$1BL=N^VTOl^S-(U>@TdkDjo=HMEXakdS z4TqI@Sk?yZHbZ!>LR01NX|k^Xt>oh)d*gE_<*!PL==c2ZMi>ofUTi6Rz)5Yi6^~-x zK*Qr*oPickWD~s4x5QK)GZrW*TPT+`^z`%-1_up^VF^!p*<_bi3|W@TTCru$(%>D8tDSjv*q{z~<) z71t@r!sc)P`Z2I@W%a$8va>+!%f7AK%Ol)@Ii9{=m{o4$kgCDIzlswjjEY+YwzaX<9i8a=?-`v(SSU^%3fImOG%Qt_kI2O%_+23@A-&ZQKA}&QA5Q^-0+8{_e8|&N;Ma8`u&PJgbwBYfo(>)-bB;% zy_2>L4)nqiqA-o++Ar9oTSPk&`eAzZrs+s%d#>M<1s0Bt(^F=N-hT5~u$9^{ToX4% zi|1$aO^w#HhIA%Vv_vf4AUbHeMRxGFy!4}&6RidK?u~G9MK?sKviBdYf7kdH!xvKO z-rqQOyxvbY5lfBaY7Wb}#}j(i2Pq>--p*Dy@?M6;{ibSAZ}*d9?q?qsskI{DIYnt{ zsgnU!9fx-)Fj_o>>6GPEX&BV3Uc_SLL`&2D!9f_g1Z)%6RtJASY@i+P0VB?9_Ur3+ zBBHV5hR&P1b-(H#E_ed2t>M$538nY^eqH?)JP2^S`lF);_>>tz*Rn>a0rVyqW{)}I z)$*@zbUN2+DY2vV^88+Pb(cI4&feJ@kq5}H;$e#rl(g@U88qp8n&=FqZmXMO7g79! 
z_04(%QIGTB5}$7h$B${fmG+~HtH6>XG94Vefyw>+C2nJ_Zl|GcD-)}@>3q_1Dl&*} zg-DM)UxCZ^+k0B5Ot!p49#JbNU0#S+^oh{nXF733yZ?Niyz^=rfdAa*ktu^w947V36As_*5~yglXXIGA1t z--`A$?eolOj}&`$7cU)FV+%&usYwJ>ZTOS0lR{#WXd$tb%tl;j^)pIqa}mK;zZ*!d zvS+T+*Ze_ZLOBwXO)U>P6@(9wZiSWmWHVsFPc2i{r4I`=FpaE9BoH$^j*PE_7A^dy zPuLccyVnxPLPE1x*%6LE+mCl*FXx*$n7aJ?kNvWo>G<8smwa5~&LzPGx6IDY!>OCb zy@Z5}G+Ab?#x&-Fo6cs~f%9&uPB2P9L@qAoGG%OZf3#u9^;uAL5AtbD5PD}n2oPh1P2R^q>@7f&B)|>7d zjvPl6i@<+&6tPAMAMXDBG1vf2kyO`nK{8)!+o}!#n{2uWi)GMCvp6m16V-aJ(OL_?ST3jnvY>j#J-jsnz?Ty5X8H z-$jr!U`iiA+BwClXKd;Z=~(S=&r5JLI+$L?kANWlZ@z-;=WYgo7p^4{zV!F*Fu873 z>`NDoG}0e!_3#fBDA^Wm75GTsc&Oz%)!%rBOl3R$mYENKzUj9O)}8D-NNx6!;Vhr3 z#5m$+>q~CPb2~lIS$MCKvqBnpD>P*U6?$+pXlTr)THd!;I~RA9RS4ufB|%iqv}|nM{%66xroH;Qdz8 zUQBq0R{ytt!*pn=gVJ%mDC)qdyvL+u>^B1RYqXv4mtF>3H>+}gDtz3~vno3GDrk~s zFW;4!wlF)Ni#i{ODoiJU0+SP;`DKs%aLF@YwrE-*6As8iEMx7TbTDLpjg>de0hkSg zYH{s=xT&{4|BAFZoqy*qYZIE()UJlw%&McwRTu| zBhjx0f%UmthHpnNHaf9tw#Ic5nTk{cBgS%__RL?8#}MZ4kiKk=d-Klccs>?L*YI;0ABkC5?k~B`7h$4!MT+^hXByAo-?lpY`wru zgyNPXPhfatwS@>3)t}vVxJGcQ0q=gnna+dB#yJb^UApwq7Cc-~(xLk~xTQzW888r) zG2Ok=>TPi`kNxFjSZaamdHKWcB%MyY&vX%TNJDbHCot@r>cHZXO3=3_DNMo;+pq%{ zTB!GYZu6yC*9~hyP4@D4>S8se;ZJm3aH@VX{hDj$7z!b>l*|plI<;8t{;ebr)%f6; z2BU_Ce%`FNV$&{WB?2Mfh`0Z-U*8TPpq^QQ<%vD4N+R;U~q zA{*bfn5}G5*-KMxLz3v}KE!MzuV8<%vjeCMTdHw;t-jqXEiXrVSzTu+D5G_D>JF<& zHu+K^M_+y};803P9bf)JB)3Gar@?+JcsyP$)d+w8C6+wCz3)JJ1#^d6}ZO(9ri1~tP}nTy5;$EAiVAB+UvO3xNfVP!nrY7X%pHyplJ zr`Qp{kk02D(s-Jw5oD-@Ij1-N8C)$h&UEspl3v&>slz=4lyY$V!lwe>1D;vM>C5%h zjCj8_*LJC#_*dXx5sP(C+Yxi!`$he7=Q?+j4_<1RtO3n$MHku;laz6kHj&PYZD2GtJiTZs#72xCC@q z*klz=a!EiW=N3_tL4_>ty)efysDkz zs4}+uC?y672&{fSq^vnf&!~QPZ}13T&fwXbw-sS3H!$#|b9U;IuZv&SFEp$-(KGeA zzge=YKbK3re)94wckS1T;UeeEo(5A61^n6v+}eQ0x86__MyK5G7ri>%t2Nx7_g5uY zpVeQLuxI0H`y%qr$djmOq7|#{=F8ug=}d+ERq~X_q0@>2TbgX2mEdp|kj7oUe*+YsDz@eH5InJY zljXnp7`RtZRlU1!2p#?<*kBX*B`{+_lYwVRWXRMDLKbjSYF~2SU?ft6r&DnfTw_4! 
zohP7MX&7HaKRQet(Q4XbfD!xZd#CjMd$Pt~#tGKFWl<@N#|2uT@oJow7hwG^q?A#NUZ(vi}!Y zv&>Uzu&&-UnWRSNG$8`zayY_02jhk}ce&77tZ6%R(+Cw)A!N_sti(Z3B=u3J_A{Tn zkCC(332D&Hgd~Cv(oFm;L4Ee;^mgm3s?s$qUXM-%-=^3FRb^W*Ij(hl!xjAUI>qg$M=!sW@7s<)R^XiUO>TKs0Za|$#e zt!ZdBS*AixO72OuV8S1PgSg+;H9A?)_a0WX>7@Ty5Xv ze`pGv$12)z7LKEmhyvvJQv@2f=Bm-oEWm6{T#aVuSU5Pez+O&$g(;g^O(uZ2^l%~d zMz`6VmRzZ4D)Wx}$nksYYm>s6&oV_mbTzr}PcQDTyPsDJO{1-M;hl~6_3>x9H@T7jQnkTPK_CRMM)49qRpaC>FVe|zi z8Pk;GgAeVH`BM4*kssip?vKt?e~9f+nXiBV`j@I)=)b^;vuy*XaG3C>4M-6VrWYYs+GaUYrCE3cIUfUE!?ISPn)5A4XjI zDim5VCrWKhH2q5c6imJ|H4%Kg@fVCg=d#Vkl`?V-o)m|&DGgFny6A4K9Vwn z)B}F~uAGIG_5tg%o$?2mq`;7=;5U&X@q45b-)r}By}H+slht6DSluZE?*9_lbOe&0 zMK-(i@mQJWGhKIx`eLf|{ybZO-7e8=p0W{Fu&iPB0V z%U?9(KLyTx*86{a1@`{`D8)^E-3haQ+!6dQdopjV(Vhb{b}Y+tJreQUwDSb~pd1c2 zgxTO+%@I~56vzLUT8Z<&=T^!G%LTslJ7uO}_EIRINBn=OD)_1)VjQ-$#YgTe&KAGJ zAiUu|vanjhF|GjqSQsOSH;ij-HsaXA!rz$zaffa_&5+MNJ?k*|@4fp@I4D#_Y9Bt+ zzu4qX)JPU(6%;w#uJV03GxAZBFE4J96!i?MUuM#OX;`!!Cst%R{RFpI125S6Sv@R3 z*A$q|4~<=UtQ%Q2X06Zcbw&=TlEO&5+@{|G+rH)0NYSnYdD8k2)55<8*byTupUiWg z9z*0n>%5e8)}Q)kQ$ANugqGHMS!3!v+Elov&?6QcYrL$`&NEu~q9y9A0&Sw}Hsl6) zl}h?L9kctXtIA!$OGfSfqS8Bg#BWz~0up8JhgoIj&zYSDGcvwp&22wNd4Rk|Ue=8n zXc0eIq7X01FIiKap5BWZ3p;V_)M17%m3J5$Cmf~ofMJ)WmjyYa8Xlrj!YCw{$03IL zF$X5q7vlv)_yLPTzYlIr2r(=Wkcb*bZngUnuPo};oZ#&(6j+=moLvDH*X=2`h<44_ zl_$3UhC9g0dUCe}cP&|!aL7MHEsh@8bLMzxly_TI7d6*d)Jr)2dJbEYB3g+ftA6)v zw}ziRD(6e&_!Gt(iJ^3VbN?yGc33-VP+U%&m3%m5V3NjSYQoFLc{J)^JblM;_*0hM zlDd&rJTU6ppjX~agW6UHbEOpgn(w26e&?xQxpqRk^*F?e8NRmCi{HACXyM=cScX z%E#RwJtvECy&VMvVthzY3#WbXoXB@SD|SBIFb2z|kwq6NV0LSxFmSl`RIbPGpXt@x zlTHqN5`9*$nieC#iQWS%#@sY0c~?QvMjHwXPgu;bYBtd`-y%k)PeP4sC0)}9W- zX(cnSeRUO!yF}w#@#uJ7RJDP)54qvBc7kli$Zt*1Q1$y=W5;NNGw1*n@)ctMRM=Z*$Xgey^_} zl6Ki*tqJwhgEDA)J@eHDr1i@!d10v!DgKc1tD~VZ$orZ>|sEM>59_ z#tRf^6EI$Z;d(PkK{D)2DW~glXG_gYAX_ChP->{}=dE>uG%ddq#qO9O4pmZk)8;m6 z6+{0AUOZWqxWTmrTqF;UZla5(-nx!`6s7K9f~OBuMUjnm1%KuU)d+^GncT&%u{$)d z(chIfjM&^b^7_j-t_u5#;QEUIF0iV)+ut+xc`yiFx8cRhMoSb?RrH!5fPQa2efND` 
zMHMx0ffplVwqo(cPto3+BDggM{ho*+&WWKLGfe1rr+s6;keUp`A~QQRA9}}S&=>d zVf1!vBN7@vDhrp~wLGz=X+8>VoATWg^Rk;z>|4{EuuJsvIE>nybh-^g>#5tDtaH6j$(os?-ybePNW3{f7F8XZl(;qJUhhrb&iaehj;n zl0B@3L7^cF)tgrfPKccjD5U@jBLiFY&YpObubzHtsHzx{_wlDxqhpu?U06)4^dPNC z|M6g`*tKP{ElmZN6$VuZ6j`~(YDIz=%Y81TyJ{2N@9`rknt1K29B}=svy`V$lE0KS zYG4!XGjQTHgmMDX`Kqde!kTI`Go%_tYyQ<>Z8QN7{T4e zUI->$FUrp~ADhLJ6uvJosiJd%{l!{0uHTyY!a?mf_rO&_oz)DJh0QxPz-Fa+=f3;Q zg%{$&O0V%l=8S@0+yvbyehx2OXKHCRk%2rfklmYi8rj-xeM{|Zy-qlf&ppzqB^Ft( z{||#(d|LR8qz~R1Q6TNSqw{Ew2L?Avd@I2=P?*7;Qq}M!Ct1Y2!}tV_kF=5BE0WoS zr@z#rLNveJ+1|UTyBW7h(-<;rf~dDd64pVuC0>IujPwViZqYO^2kBp= zzdtL6L;q$V)$D{<4Ayls9D#o;n}JWz)0(b1Lp?g zc;c)Elaw_^Jm^G%C?Q>g`q1Va2|FpE{4RIDf?cip{CtxE`S*w*0zA8x*#moZ@9*4v zbs-Ok!6v=OaX${@3|6Bg&|>kuTy}m}N__9>q#23_o_uvxiL9d?n zW37o%sM+GUNQ#S>H9gn*(Q{cr$#~0inz^6@c+`RP_HrO9q4~lER7M_^Rc5@L50|(E zL0URr>(ym@;qD0~zs6}!G4~l!{vZ|ZY-KN&EAtMK^Er!{z0H@$A-s`oURm>{gsU2} z5CxBd%@qIPUtXW#&o5!bU~u=-`kp&W*PE&#pk!bvp7uULcL4QfaYcx%!|$3=8T(TUiS&@z2*Czg<*FogFEaQw zwZ`|w$kAg4FK-e$eodnX$?Q|{;xOM03H$`<)S!;O_pP1^PKNY^TN0$S5*)xiDB0{9 zmWfm_zH^>UPCcYTsHO@Bsa3~3#~kl^?;%f@TkV*{YSCLS!X+XHN5g|A8%EdeZ2pha zD2BxFCht7QLMy^|pM)$ZId@bev`2Ujj=2byc$74Kl&?^)(Lu>+Ci&Ac8|zTa^YktbW7?#D?W z_qFuhg~_S*8aKs-H2O*H)zw>MMii@w3}8mTzsPW`F*ex8w3wt7P)s}s!iZ_JLi-wb z5F7An>isAC?k34&8F(Ncu)2i7DX?!>U=|y7m&%KUzrh&OHIL!Y9ZZ1io z2u`T#Zd@OJK6dmcbHktdQ(7gye|i}b&K8tkA+p1NRe$wi^9Kud)$QvIhgHY#zd{;3 zmJ0BEP)RMfhuuYZH5k7TSCkhViesysL;1DwV0(YfNBub;`3Qa*2A_;wfQdBFrw;o03s(E zM(1NB5;dclki9|DfixR-AR7Y6;zm2fgzli~z`6k!*g zqFvP^_vRDlo@f2zX4teG<-%7%6)$SRDu=oGuxKg%34wNEN{S7OJ7=((BQ~UjvC*%S z%=Al{75*OayeIrx`;^$>az{7nA~uS658zP9FOK9JK}qwnYiv76i@lu@$p@?H`L~Vm zlLCW5;=#Fj@T8#^Tc^ZY|0|hzmV3-G48XkDS}~s+&YOBpBUms2Jx66eZSwZYC1?P3 zTeE)%Fz9`MBcF5q@FRh#@7k@fnuG6${yg>Ga(Yy4GH9oi6Eq>KUCOFD zl|(6He4^MD<=LAzycom84{q4%%OB=HP09DYwhC{GwtE#696D#yi|xXdPwjCSUbBs! 
zCta`ct%zn_Xt@wlb;@HKz#Z%uQGGAnhKX09cR9Jc5 zsTuQH`bK1oKgmx5tS;d-KSdXyddJ~>$3X7G|H>G(zTJA*cfKIhh>HQ8o)BQerw^^Fm*o7=WzEmEvv04HmQp1^ zxVu#c5q9rlWtyvKd&1vb8C8@r1lA#i@9_S1m}*)_sS=!E^ZwROoa^%64*SGbIx9R|IV(@x>E1wG^M8SB3jza4uLh+I` zSi{N`N~3%+SGFLclCg@lVww~C=TdPA0`FS=r?S23Z3$&ZOD(}5CrDne{(E~fY1IC( z@3wPayv~bGi+G2oiTAP@0F%@~&HobXTIOaFYyn@#04!Lvc}HKwriZl^1UhbHMjvQd zZ&A=1ysNCaz+d$mQGC1_r&8&#V*3hdp@uf8bPu^!Nam;$QNv~nb@xS7TIgR%mUz#= zyB<}~-{3eq`b57j*g~J$bs+FQGo@25(S6>%v)K5P`tXr7${#`IdXTV;YN&opk5-w= zih+329wlAif0|+v0UmEOpD|c-z7s=QW2RqRvsE@LGfwtpwb+a{!N7yXb>Z{!L^Ar^ zU{;HrE+6NZXA0fxh7-@k?)RrC_V7M+`%~cehbjKhhmFE;|T^PwRwNB75g4tKQ2zQn=}S#xuIjEY zL!%EUTI3j3-+mz8PbleaTVOnZ7*6hui;Pk+X=(qSDKPVYV#J(U!ZTqGrCiBlN7D98 zp>brxFMY=~$^Bux?$=79?%Qu-WI8xwmgIWpt70s%-X`8re~O}YY3zvi3Rv;tufh3J zZ`jp0A`d^KrJ2%KGA(YjqYNC8%!h|pPX{5zQtMzSIyj>~BFVK6AZ0l13onK?WE+-< z3HM_uL1EmLp~IizNk|_<^EH!B_sJs6yrnLOmQ0I*u~$-qv-y5*RTpXz=BXSSt@7S6 zE}xufGA&C1g%WJC9Wzvg5i>P%J-b*Z$A1#b>N^%wI5^p?*G)_C2TNhS-;9;&`4<0b z!i7IYxR9h9s@=Ep%%?SPNOLq?OTnbcw!r7BmwIqhM8rL2t&OCP)4xvEt4)VeW+w1} zmfG;1VWaX>hl8OEWkbNYV}*Zd4zdO`|rRD5gnZQ0 zhlbXk{an;HlD_b>h@dpFTuV6(^Fznn^IRiFiDgqt3C_`Us1S`G z+ST=e-0}q{=-=L69ZSg%78DeeA+lZ`m7{{(+yFVLP}w}?6fV929_*R>+XpyApIA$g zSk%+wlhum}0t%Q|SpR#;jQLB7qVw@0Z*ipb5%_IGDX{`BAU|JeAWgn->(s*! 
z;vp87nfT_V*?Z0&&C`pEU%Z1QB8^1s8)@OfpKnGIO`3V3I6$v7J1;5{g_#y87QqFs?1^C3D{3;y?I-zT;y9!w(k;g-?{|q_QQL1;RcT0nlZ=C zY*dnF>*oTp{s~MTl`K{^HVZYWiKRMgYCI<2Y6j@*_8Lz*8jw2yDHCT;PYO+!Hu6(F zNGXn{JAp8I`wPp&M*P#=&fLYYveOp?L1Loe?`U-wH_13+gZJ{gunp6Qb0y8?jW!{( zIS=PbwhIs}^c=#M&?tcwy3;5zcrB=nm7Pt-=SoA$l^XsL4B1+GAgZzOjAVuU!?=c% zEPQd6XtT<_Cm7T!6T8>d4(c&-?r0TN52~&^LFPQ`vYlfPD2sBy1p#Ck&3)L<5_$t>!@~TtH%q@ zUByV~xLPa^xb5yV*${X9IPf(c&10qXvFocrmR^4#4d{Sg<@hKzDhQ@`e{S^Fo|k6k zW?=7hmAYWEBA8{6))Wfc+vr*bGGS$>h)4WJ@2UBABCgjuk^1#Fx5b8GgzeM!W^C=j z)9oYvGk^9ZwS|bx3xe#nN@la|^hbMdx__*zH&jXvh7m0(p22%(S+^q3QhxrO_>n6< z^cOSCJU73xT&(?L;(d`_t4^g_$N1c2pgzu(#gLxYY3rJ^z3puZd28H`X1kWAd0M|& z1^-E6;z>vXgt0ICG$;q%h;}3zIp3%O?|D74g?8^uul)5ytbwgi7O>GH)iWH+@(##Y z1Z`>dXMFjD>ltmH&fMSMf6TgHQ@;N`9igz72nv9HE)ulpe{5g@u(Ow0R*xh48mZekP^ zH@%x5o&@#W7CzkKbx=lkD$5uwGptgan49Q;oGyBY6m-mXj($z?|8Kp$by!vH*C$RM z>d@VdG=~lW>28p&LrI8$G}7H6(%s!9ozmSZAzgw_r2Eo#9Fs0UiOVki~s|A#8PNaL8x6lMX5xR0`K5+5Ot-=E3(xVO22+gc#du) z2fs!fyT!_$qu(c?l-;ZTm5FYnj*IZW2$gyY9Qxv(1bc-NrlE`rQ277mvi%gtiN8oI0$U~%x^w)V8 zW6npyxl??Cs?o}m0LO3BeSzFIK6A1q(ui%IJq=aTUL&v;R%l0V7mmSMTd>`yW1PP_ za30b<@#tS}atpcimvUkw|HOgUmP7VxOJdZN=Bf9@B#%pYeE7D38{Fm7mRDbhu=6a| zZ0|?kyXAIR%1-vm70hl~t6uf*pYQq4&VKjQ$WHYKAIWTVR2!cjk_6T|cPd#X^cU&f z+Mrk%Ch@&(XVuxH+E4}In|gB>_wyueXExNE*42xD?!~>>rE8f?^^0` ztRUTi+MgdUdu1K_Z+|(as6LA=(GWAzfDI5xa4KUNeW!I77K*zn8uOZ2wZmPcxa|O5fk;thcmaofu+9DQeQs zLw$~q9}GBcE<8^KjV28T$f!@QHj^t3F<)Zy<|Pqq@fBCKEnH0OLNSMaZQm z9f(VjF~s9DvdpE7$XDI*_C?8-*3ZE>ClO+64$l*G4q|edA2%YLBH(oFLg zl0MkAX#*mh{`y72Iz-9mn+o{h+(`JL#yXmmA7km)5mV(TX5X;Q9H-+~%~TS$*^|_` zAlj3@dBs4g@18%YN0Er9ppEiL-88lw<3|Bg6f)5_l}#%wS!Z!n2M!&g*KYu~Q)-`M6Pl{l~l>Rcm(F+T_1 zav!AU|Lnt*Ra+AsxZ>3l48s|Cp+(k@$*~%7;4U1k!{;(sH6r7% zGKq4+E!ixKp_hP&?6`hqmec`v2_P7(rN#o*B6s#>rUA*ttkQC~U`&sueN5B|VqAmGY4tQW|PE zkRoQSNq`R{05(UI$K5c{(ABW|j&sLj zdtSy#T~4H5?54FZ6Z4A5jG$a*d*?V->RHF9)=zt>7!5He=^dYL+sYQqj?-T|>3_Jc zWvr>a5_zuw*`zbOIo~iqlo7i+Hg0*yV0B2JIQZJ}drbi&-ro0>BJNYw2*_RtqKPXW 
zl7i24m3hR+MO2T&qoGAkE!pN1@;nN6iOea3?Xv3ts6M2qs0dKkndwl0PRze5DMl6+ zb9FUp_5J0#g6OA;OjIRPpm+UGJ(!94SF`R6O>rCuorDhPWrbA50o{n%qGo{H``^F( zzi;L^y~azmLhN*le;N_;q2`J^nq_NuH*~J#vYW#p!|nJ>@9HY~uV26R=gSh6T*0q= zRRg30Z1~7t_>v99l56_n^HWiY6>~)&Nd&oGpR8I|S}E*T#q8`T+*ePu@7<2sDOCgu zBM=}I({F8U6?lLx9(eG9ZtjMF6-VG=k!0Vu{STUwJ^4L6q#o!2Hf+;)8k+xn*AguG zRGthHl9EmFwaQkxFk@D!vq=#1G`S{O5YEN@7=-)tFZD@Tlh`!fxVXSk5_aa=TwqWT@&U3@DP^Jma?yu3 z;E|1=;SJAwWT}i--IEWNKXJy~x?rJKa{6nZe1;e%Q(5Fg!^3fN>UgvVZ6k=L2`%WU z+)6pq6Nn<_Q4Z=?TsT|;`GjB(>fgNDlS)SmV(0LYV5ZsmMv2v+E2!GH&ndUedbUOL z!6meejTN;j8Ai#A6IKwd$`b)`coMG9Yr)n3+0m-E<6@-+6lF@u*sA`7O&Di_gfLI0 zhSotu14?2oUz-1lo?04}TyF|xvw`PGzbX4zZ!4K1UXq{%y~0Gcw^+-xPgng&3Zy~s z$*h?n7nbNbtlX;BqZjSwdu%@bk@9K83)DmdC1=z9={&w5mfrm7bd1}6!cBBKCPqA*7OFXrpMT1d?3MlvAJPVc{wwEVM)mto-?D-sw}!PN)%i{UXEy_u|fA`K=@ZzeIj`X}-RIr(ZrbDkr^xHZyn@%Kvj^#0Hw`3|!F{g(%b zyuLx>8rVxY+#HK8kC#wMVH(TN@?xa!ZLwu&cfG!I#mZa@$e%86ZLJl6Ap_hzx^Woz z&DO3LL9SWIBvFioW}UT;+>5(Z8*W3PQk@X>(yA7TK8?9X9hv+!QaKrN9Tb;gPw%Rg zC{89htAy)12$DMO_mnbIzgX9boZ?}=;Jg|y(baS~G>P+E!&;wN3f_orD0Qx5eU#sV}o2zGeq5P673M$tssvc zJ!8-oS83n3$XdomK7voyb7aX!pkk(YwE?QuU&;H^*1;1ovavgOldM$IkC~MSbfZ&w z&#d2ndRAeH+@xDs9gCGsKLTt4MC4U6lP#EXCHu0WN_6Qy>ooOaZ<A3S&@bK`K<4$Mi=XMK=x4>t2dz$97!s9G>nu!uOEbS&bc|31!46 zxVhK4?u!~>P^f7-ZgNmpr`U0Oe!hgKr=Mo;oUBqqpz)gO58L7;{IiT@BAua$iHQvz zyV>?M8$X=jFCC3Jj=LAwPw`rbJ8bF6jgtxjg6NY)=Ns^g^~8@*@Fp__`|VJp$27n4 zA_A(;BAyV?!>UOhw1npZyetaqgtYAStE(#+F&IDkSSKIr+LN`;_UHvAd3lu4v9Y9_ z!O|GWX*%2-Gc{%aVt~!a&CLxXg(MW3r)1-;3q*MM)(uuBUr<8wKx_heCz+&Ce%^if>;f`fd6M_O?+qs4|5pBEw?=Z~7y-w4SJ zm=lbotVh{5a1DDmnD4VW+h#>JT8=jHatBzEgyczveF0Y^L`k4g=jss6zI~h2EqMRQ zF)0OoJQ8u+O~u2c;AIzbxON8`{OsP2X#m-U^*0l&VlN8N_+$i6#(M>Qbh=UD7i2SKyhxyOLo{ke&D$-1azkk^#%t zY3l)Y1on|4SF=yyn-oK9TpkiXnZmk&btL*isF zvJV#+r@@k1FJCfIi-;H%r%E0iivAU}NP6@9v5>Y&{D00AAJ>|icL_mIwWz7zI?u-Q z@oO{$n-7SUyc!R#TPX4R5Wp|<-QmQEh&ZSe`w+VqnLI1#HB#l?@=x0X8Y~eObddSa z*EkRw)ZDiQ9^@|Gl`0opiwtR#_AM)XSgxtf)D#KjB@jV*D`c6lNEW(5DlKj9{?dJ< 
zdmyxpXwl*=?N#J4LDj9P@xD*>5|P{l-O;eJkd7_xhQlg=2lbeA8P58=UXspMV7F`O z_i_y^?Jvks8Gq&{VS0O4ICH#}E3d0$>46UV2EkoH$U9eeG_{rPJ|!Uu%Ji%=ORYj_g?Q_A*a*qIS{$miFIxo@+a2$G>hg)$3y=x z_c?k@OqISUx<$lZ`UR%p(K}_dfjHO*_PZOFx%xal#TP+F2LBN?i3i2GM8lCPRmeG& zwoE5*OPP4+fnqF5DoZzjyI$@xFjyss(-EX-AsupCBXhIg%!I3 zBO(~gV1CPf1)adHB?Q_=lH7Uu|CD*XOG`Qj z5LU2~bKl$P7Q#{=9W5%N)>$gF5W1(<5F%S;p+l&Pa%D?iTwfDpsCVgU@_%sa}m|cH+*#1PWkApF-e?5 zBf4LSik#UmCg1e$A~=O*5P9)8Q?CWM`*BYOO7{}GKu-RBxRjL!=$<9d5)P8@pVh%%7n2? zO@oI#46f8|<<4hXTL~f!{+BJcXf1}=vHrt2j0l7jIHIZdpv(EWdRQ!Ma=9nXfO3Lav~4c{>_<({%{wEnpwWB=Kv(LA`0Q+U z&G9@JS4!TeIMGxL^I6Xvi&x^33J&51=dxHY%0ZM@;2d9tQ7x2!vX39ym}yuiy@v~b zW?|zQB17XF!ZMbKlhat1KgDe~xiVfw?B87xf@DOIimM$<5niy%Jh0D(u&3TDEvfhUa9Oam^N^<2Yvqj|Q zbd!@r1y>#db-oL%vOG~q(Kd@Hbg7L?E&saCWj4>-h1l(WJkh7mZ+hV`DNI{hAQh01AHnYu=X7c0(x z^4T$-cd6RFwIx1usd8=g+f4nuapOj!L{0D8laKRW&sJF<5-$#5DuHi7plP9UN?I}4-f@#(s6zrye|GM z_5yPy_c|T)Ysh~9lxAgCDXwZ-6z#AtTatlc+i=0X+(iH4MYp#7Uq1fgwEsw$0M~*Z z{uj6agu98*Sn}tiRiMD#G0($N^*o1&miaPTe7hiDM}=hf8E2s}G`yHmnxOAGEO?rBsQ zmINj#>vI8vfFp<0bafduNhk>rfd~0U01a+#mKy5lGFnFc=TGVg?P(;iMlE~#0|CJE z_a+2Dqf|3#Dg`~Va2N&Hk8RuGR7g5hzuo6eybKnVt)LuR;)_%}XCWqi4`y|?B3~|Z z2(>w=WsIjXwCZ^s1PkIV{juYU{RAwk4yS+2JU+-<#EPp z*-~t7fckn#?YK^79XvN{bD1CiQ(PFrp$&G>UQRZxk9FlhFA?XWR!=GMB)F*mnMN~p zv+0d$rQH#8j(s?G6(g>{`Sy3Jw9SI?1G_?=-ob(hqGt9;IpOrJRI9QlMhB~L_->$d zU8<4d24^>gvufXA6LTqtT}s{{Vbj=AW@d1sEyoFAs;B>*UDHoNx1$W`;Sex@${wVX z4-E8!t#25lvJFmxjRSoEVzDjxy17{48+|5}nOR#ES&2os)t={V4kZak^QT+cIPpF!^Xw58i2lLB`QY$JRCEu{9*s-G2Ixm6 zekyjj{GGkRsj=R`bWo=u{H&F<0ip79gaOv%9FD$Sgl$$Wq$4oh1UmOZ#jY!IGd;Da z@Mjmp7zc6gh2B)p*~FPx?7+Nh7=APdS<(l4@7UEltJF4iV5na3#f3|n8AegXeQ6#O zfS?U@F+wE^u-h4>Ne#`1Dg%)M$cMts z$xcmeX*fs)CE5<0%aeN3a{3{>xs>1e?Z41z#;6(Q)PxonCPfAA_^tOX)+%K!RAPh84HZB*E;m%OS+Ri0l1~#S%SqWN+6$Dr5jC<4a;W5umB~I4g%>s z-nuXn+#g=5kfgE-!~cw6in_>_WJ3WcqygG$3g;ul3=P-pKop#?ZM{XQG|;idz#9m1 zcS-EO0{Ifc5Q)kSS!fx2Ipiaau#eRFup%_w3aeM3iW&s=5voF(AF9Vp5B{Y6LY8VY zUX*%)r^&)f1j%$(1iC;dx|46cy~1Ok@KVG?%8(19C$a<={3f=hJ#}V}Ltb{?wl!W+ 
zTz~N}&}p0o8Opm$JZ5UVD?rUs_n|V^1c7kKik?avpVd=&5G#nx024+R3yYf+xG3OBh?X7*HQDvmqx0)|{{DGvMUI{XfLu|1#XSMK^EB;RQgROb!ukkyvmcq7s(ER! zmosSvSOypXCdC%@iN*uy4|p?sU?|fc%+Zi%J`@YgrA)_M1U;t1by}DF6$|aZHcN}c zay)QNp#4vA(%0Ab|0+%f*3{HIao3_g15e}T=f*c=HHiO>h528^sDJCB(qFyCBVf~~ z0|s|O4HM5NpJS#NxupRJ$j?(*)?PYb%|i-*@@aVJ^i-PwYE&Qw&PHCly) z#V&{Q^2Jw4doh{mxCFjBZmW^2@h6I)OP)nuC*&Y~5k! z&hp1c%m*vl*M?Un-dSw!?$m$U&oWgwBO|wql~p!+df{4+h5r3{pi6}XEN#vVQTA>n zL+>CT54YH>PTrwj2s_wkJN$mPDlFXPQWISk-7ERK6V5XtZtFZg>~!7lQb)sP{YU%6 zCpLJ-X40JXTcOzxWP!1*h=U^J1=Z`CM1m|BxRPJTh#Y5YK~t=E@`ITZMX8tbNvEH0 zd5jg4<ki0JU&3IV00t~epsEgNNH0j^Wt;W1ogMe6+Oxv-+}*jK@6 zQna)ZxSm^_CIuNZSTqW^ChiqY5P$-diO!E_GH*t95m*0qP~gSot&80J{l;L>Feih; zmn0%$@N~XX(XxD|J(XJ#IdWXl`$j)O!B~vUDh6^QIqmz!iB9b8Z$ZvMI&`D#omZck zgT}bO5eVa*33jd1Z-M9Fpz5U@gvJ3I2Hk{wMS_n7Z+d(APX@9gZ2C&9rTBcp$7k14 z89|0)0qLL%@!yr=hAVRl!@_$C4L)#F_!ObzQSUBE6V7nmC}Ahbb1_y@^1GM!hS>%2 z=|zu->wa)7P~?6jaIQWHrDfS^l~OCIv1OL*pP|%I5)EJa(SduuxbPV3f^rxXB%2Y3 z-NS=_Jd+o+(kN-F_SlK=YK~btz*nwnF|PyX)ueX5p(2ay&S&mY)@zw{>ieGy>HUl~ zLP2x*TstaYY*=91Q7xIlW~6hS!2CwI?3I(WR92@bapEi2T0kpB)0oW=}EqI z((i;l>Ds@1m>V>b4Pn0cBo`UIALZebKsjJB2{>d*g4 zq_xQw`vmWLJCU5=UMn;7V?bLYY9?sad`{FQZb~cZ=&DQ*QkAfl>iAXQ0_`+Z^_d+t z40S>AW^uqF< zSZ_^i1tFr4f%k36y$9QSy^E{$BPXsmIQ-^@_7v3Ssa7%--gn`?qaPQ^zJ@QYA-#F$ zK^JZJ5VCx87}j$kNOYk3la;mo2iv!`7D!pQlYxVJoYt)6fH*2Ds_o-N!p>HyKr3d2 ziz>|wO^wZHF--SBXUvXSR4i#2rM+;Q7uzl!hPY|3TkE1};g?q%cwiRXy71x{>%7~; zwRHMIKh{NUVlru;yHf3Ir)#d2>*mkoy*>`gIB!*S*J{>MFTkrhQ5VLEr;TpE$-_=) z0h=8U-Gf|tY+Ja5scCL>E8oQ$l5dWLtCqU8%5?%jl*`>D%zK&HuPx!jn^W4k?nVYc z({Wi**~y6Rbt|}az~H$|Q+3CUp5-C^tX4DA)Ln$AKs2O5(Z$1YeE5EzXFx!me(qf5 zYN!K__NiP*V*%MZ1IK}A5eq8c6$0wwnuMH-#O$NLq<43pNfU(H5Hz;^V8R-Tqm+e0sa0}Bk7qevI;?kz zaB*>2M9<)2W9NZCR--BtC+H~1$d#yMSupqWfS&@DEB4dbCdmkXo-t4ULoC2#m?@*K zo^aMc=IQAMtQXFh}xP5!)DrLSjBHhN5)0uBY3X~>8M*FO(;$T9Cw`djUVtu2rd zO2jl}wy7atP5ytoWQY8n8ltqC_iGE)&guVS@L-hzUNb#oT%Zkg9n|j?BP}N$cOsfa zbQ!Ma7(}zxguAXX7SndxM{b#cZ{LtRx%Y~EL`;IV5iQh-UcZhp9ODJC93PRXnxtXy 
zxE&GACRmpYm&#lz3mK@YLS5*hlYfRvVCUdTal|B{fain&iH0uf|8w9JS9(sX}u!8vFS?2>4l`mX?m?!C?n-N9Dyfam{dgu zscXbMe7HZgHn#Dc!(3R(iA5^$nu)#t)TyO5@Qec8jsX&VG!rRJ4Oe&m7K;`HKL(ou zDnx`Hq$6=Ye>VFX(x0#OO?sgpfLA!ke6z7XL?%K6EXlb;-Mh$!R2!Yy9X&@{LwUmfA>}pLMX1$~;+Qc9#T; zElwUIPBycn264oxjY}%(zJ!kSb9|7RC&yfw$f*|g+^Ee$wuIQYnI%L{D$|UqJm}3E zm2@IgnxadhNTNq7iRjoq-7#@yYu5bu$jecG9sf(S+9(qBIW6@+wY`XzjO)B^sFnB= z%@hB1EzQo0#DObql+$cJO*9LKxb#eGQ$ zC`*I6Ku^g4_iF`7i_2_w>DJaOyQVdmh!}u0#*5>$eyDtSqsctmcfYtM z2~8!CI>JXB8&W@zFQiZW02?42W-+jT=86{Y79n=Qx;5eTA0!?=ORgy%6>$KS2)aXA`()OSP z@icJ}Pei@<>{thwY1EgbA>V%}dPpkrK6#}_HJ3BGqW=f5mHSdmd?PX(y7UIqVQ|)( zX3&4>a`yAkxgN_thyok`e{ckVG??5w?U5~HM)VBuU@%)HVBo zYazM0R69Fc;Qo2^!x!^$=_CY*gG^u|2+VW0?`mBZLlp4+0vMNshy3$?rIft94|2>~ zJdu%+wRTJFz_(SOR_le@U`&6&_C%&b1}b>2CO3ie1g)n@1*ujTeyUG8s14SiDv<~s zaIa*7YM3BO#~Zwe&-(Mfk;GF~nsieT8hcPnNJxC!*r0YM3avvD@sWxvn4OaaFho`2 zlkmCqHa<)&P^_D8G_VH}EBP35hki+UD65Pf1-FG~JsK{I5DGm6{o5t<01<@Hvgh-D zhETTHe-uEU9+XVMWh7kZockab-Z0}?GPGFKSQhnt%EB$;{e_0t7TV*nX1uUwSFkbY zw%%`tUaQ5Nv1Pwzg(|%@{)AuomLk zqJ`1KLyw^nZ$-}8d)Hs^dgl>i^8U%*7t{+A; z(F{!48S*=c2P<}rqvb~xo8cff5$O!EX(>U77^qnNJP1INaM5H7oj@Tfpd+AqZU@en zDTQ}|8B~GM;3s}=`$R&&DRkaDYAHg_i>Mm`ooYp+a@?v7_*rU@0ykRP(H#ixST*smz8d-09$E8fHz0lhjH7=$%}?h$+LJt>^G!q5AVGk`I=Xr`jYK=WR-`RwH0_ou7(%3oi{- zQmu;S?v%L9wmI?0biIoZt-rXx#i^B}Ffi|2W``fpme7rScp%yySFF_g7?Kw+r|La& zF1>DM+O`xaE2v^_3-du zVSDVjE5ZFQug4!;3?)JM*Hji@MoUSr9dJ|cJd}G!Y8gL*=#Y<=YKB696WikBu{=CH zhRH&q-^EoaXSPZ&E@psRIHG$ZUO$p;Q^!+H|9}BddGwZ=U!Jeze?(_QU``)qz~2Hk zqQ0alaOn`^l2S)E%KV=R*e7zs`cr;2dfgCA%Bov+G5h?w}2GL4%wRDfh{B#`{c7XiWZufz49S8k)WbHN+r zdo(yt7lci1l)~4Be~QnOZ)3zfES;SD<11GPL!l(_#+va#@71wVi)sF{lUOQ=&AFr$ z(bzULl|2JKfSN5@9$NHDENW=l6GKT-t@Z_ydEb0eAefFZFxSM}il~^=RJ~S0Oo=96 zTb5+6O_CP+acHKO>8UF8$oot^PKi={(Ef>2W@ADwTKv6lXTZ^WC<(f64v^!z9?le3 zyw4{9>!^}ZsojgsYbHPxfS|)Yt;K&oXHg}Gl7`+n_VQmJK!!@QO-IM{VN{{Ej|K46qzQeET&s zaNE)wlen0owxoyW#UEuoUDN-v0{v|Uu;Y62O_ShQGqB6O^Q2I?lOyo(&pU$cx%A=_ z1T=MP``{Bix$Sius?grK%CDudtDdjCUZ`8~dy6x>E0dm8L*-c<{?{@uCm(`D7XZoK 
z+yG~@KQW2Wgb_MIhcH|TEM}sUgo98(sK%4X^;xGw|0y{0#VN{szCfRo@8~_~7m;b1 zehSTiG^3!_5Rj8C+qzs zrW4I@$gY1X;Lx8ViEZU4cw(>#&?aPPuqxV1I;O`VO=0ULKvB!l{eZ*2?%e-v6a-Ykz`ptK2a>X)Mr-D##Lcx^5r>wM{#D>UT|-7i)i4&F1kh^kpS%E7 z2a0ZgSom510PmpreCPaooQ^BxQYiq=6lixl2EOeC71(~MyD$JQ2WnQ}B%^1Vc46F< z@|I7L+GZDUWdA3N;GoUJ)mI0P+ntoUH>*NUCMqL<(D!|&%jexw6T$D>DL;tEscD-2 za}Q;4q^LkUu|O@k0JEy2)#dVat|js6$m{RD8~QO*#548~Jwq|+km@|iOv z?~E`4k6pxiCtHH`JvwOTl^wcbKq~Rr0M;15)O!JlF9}#e2^q`Dx#v_8oZNaG_yiO( z`6Jx`{w{m2qW@#laByoRUuR1C>n!(-Uve5_#1!^V%Lsx!&FLhl3=WuU!BV}ZE`6Wm zDEJJgd4vu>V`%?SPv=@@-U~Vv0X+oR`a(cs2ap_%KaJ#8%vuo6AAhufKN{+LLaJBB zZA>rNC3gRKmq*Satb=r35$~}`x3?o>42k;8v5Vi(;`IW|gEN=X&!a1#JdnDqlPI4v&TY zEaf3$#WW)tq}Fd`q@$LpW0k~CAUU!|%Mx>O97WEPM?bBeCXDkb*jl;gImWXq#~WeA@0e#)H# z$0);TtR->53pu$B0}}*^98t2%BIWdUT`=T{N&dPvk22Aik|b5s;r5ny^59J$d__LE zpBM9qUMqcSg*pws6}Z?b;L@FU*SToE6Cmx;x44LN5 zam)HY?A1Tpc~juw0(BM4K}q?l`RAPp!N)J;iZURErcZVl#GMBBtf zlX)^RsB~xyme0k@NZNlINgCC_oy0?Ix&PsJW7E3IT2?L;@ux@G5dYA$LZ>Eyv$*N) z={IVZgIQv6aU;t}6fTm|7(7DCp)~HvmX`;aE+^yTMvNQ)E9X%DgB^uk`zqMrZK=bPxmA`VB0pYlo%`hzClyZ)W~pt zJCnY_H*@?Mf6l2J-v8n2iPUBTrs?9s9t+Km--dE3t{>?jahxr|lh|gC`+FLeNC`e0 z2Shb57}vKHwDwU$ck4v9d9&$R&JeDG2k8q+r=eE(!8~6~H=)qqrDYxu)hI+OO`S@0 zJ(?a|v~T)+Fjpg%Gp1xspE+!N<)OR*dV3162z7F5`VAxXREyOOUz$X{9w*n1qlHO+$GBl{!CDZ`J+_mc=Wlty%~xZm1)coUtT4^f&Ku{+N7 zt8Q(=q?M?Jn71!dAN1!qDizO%3`m_{nYQ>CG9O-iS=*;7h;>2@b-+>Abk$D|sx?T4 zT{4@Tr$%<+mr*77F1dSkJUspjo?djX;5Tr@p->^xgJo9&+yl-vU-DQgTV>KG^_vMh z@D}*}x=bcFqPXMS=^EqbgC4;UkLc&ujM@>f;UH>MWnjif4!^e$ik){GqY0$;;icy7 z=H{=cov%@jv9-e`$QyP%R}bMkV{Qu)!IAN;-|Z^D(iW)nV=fuxp-SMNwQvj>)cA() z9R2R0pw5+bdkk=8A)r0*`^>0*<&X`$j|s9fnr!V#R#ay*i~7LYLyI;`QN138FPbgT zLQeUB**#%aVdTryu$m;nSVuN55Z*U2pHAX4)A=dFjWWHI^x26~$t8D=H}5%*(=UeR z@i`J9sREHE$(G-G#>IYRLp}HE{!4F$_&qK`p@He)JIi)B$dg{2$`Ouf2 zwfqRoQFmJ$aD6lP6*^T~GWN+%LL622kgvdv^r zTfU&?QhaBgPt-FJq9jHUZI$I9D{md_^i5SX_`#nd9hs^a2@VZ|DMwaQJ?NME(T0sh zwzK8sx&EvtQFVsvvt41A8m5bdm@Dm&yaU0sYc%6Ab{taKpXW#2-{b@xS?}#1GTjwU 
zX2dQo0WUhU)=NC-wW?j^JN7J}=?(C40h(n|pSz9d(?d^H-1y<6@bUPtaZGjD2SLDH z?6GqD0e*eBD#x+1QN%mYVxB!E|930>aOEewcp+Zr+-G{%44mB8j-kj28n+-MRJdu) zOE)i&wI#Z6iHK0JahWIT?hy9r9E04WeODPb1B-<5 z{63L{jfjrK+4opS7H-Z@{TFqt>-CtTk~`mo=8;0#4o%l#2BfGO=dGKT2&BoHlc-!i zI@u17_hV;EbiKEgYAwz->&9+@#$Ss*!{DQil-)r4yD#ZG@%gmJQ$t#3j+e(+PF zjvvSmC>44c3ChlN@&u{+z)Zck5BBsh^ibzoCwrmP@oe(r)XK<5-C{nZr(P1tT-!^{ zc(iI#!pCshz4AGRi{4-n^3#caI7St$ z^$re{MkL~*TwQBiI5WEllFH^y6awLym*uyYxr`4tfk?Tl%)z<%lrgV_yi`n%I&XR7 zE#&6kUec~7kd_j&ZqY?7`OMT&?wuti*^4ighq2uNER%+QJI2Q z`Bg31P|Pv7>Je}?-YJo?W(L7E4j*7dR{Ys;MHlDX#VyfSRI+;|em(cGE8jb(fKkGC zh>rBp($n{+X7}~R@--at*judTQM21l94|*`0@mB0Xj^cq|NS%c{?<`KmdN!9B%ZydB0Oeerz5LgDI1pZMih8*(@-Aq%|f0?L6Xl2Gx^hi}!<@Q{O>x|z#Po%xa-1tUmcRJawSa-U*qubH|^Q;Q5?C?hY;cCLHLv1Pt z&W71TIlu^f(?G43s&k3?;zP z=f6iA|0De>6?{`j#+qvc(lp?cNiv8D`=>7jA?3s+&>r26RX6#IMv2amRBI&{_Zwp|3`J9W>53B&VD$o_=Ofk0$;t5eYb(sx+~^W67aj5xe@vi6v!956DeSbF z#5E{@<_mO4Bu|?{EJv01SKCV#JS~eYC9cy?1Xte?AS{K6{3W0OBrOa}d*j+Fl2mXrxFzcIIozUauNwQ<1!|Kz1rq^c!M G1OFTO`8qWK literal 0 HcmV?d00001 diff --git a/priv/.BUILD_images/Login.png b/priv/.BUILD_images/Login.png new file mode 100644 index 0000000000000000000000000000000000000000..8b8320e3230a82a5f8635562ae6ec30691862b14 GIT binary patch literal 17361 zcmdtKWmuG7_b*O^QYt-^ijxVLfmd4sgD`L`6ZtE2jED31xY0&Jqlxe z_6R3L{ONOBoS<<3?|}f>PJKO82Z`oi*->aO(E@u~h@PD3i%VlloLpz_t82KO78aH4 zDY|XuE}nZVFzRol6_p6Ov>bnPQlu|b>5!2+xt8o!TBi|qu!Og*ip9|gAFiIaV|2X2 z>tOIJa$Ogws1Xzo($4 zYPY{r5l{=_wj3kZkeo-Y@f;f2HzuDb?!v~aV6PJxX-%^ za;{QeIUMMZ-Xz1u9LL-4P6|O`m0Z+sc9J1riH7;nP=%aejwbPr zh=@WNFktk4U+iA(u8B)PTZ9Tjy7~87bj3|R7RRAedfVNSDB9popKcY8K<%$B<^pw; zl~;Yda8H>9f)`2-`49pwF&5i_c?qlP8rLwtn{K~1MkkxY5>a}j;g1K?rKK}Q`UaXk zM`VYTauONdK`7MnheMIl%^XOP)~o`d4vWXYkyaZ_aNceQk{?- zzLySgM4*&wSLyy3DkcXy*&X`~fAHn!_;==74y$2*_~#Maj$kNkD&i-|{NhId*L`3e ze~PwhHjUy}v80PWCsJ{u#t%JHZC%XsawzyzVbl^iXkD))JDV7sjxw$-7AF~PZpS-@ zJjJ=MTRHMa<0;upn-&yc5!kRFE4@|91GA#Kxe-mDS**{#>0QGz?=NH@M6n|fW4t^X zzIT=WLr$yRuKA7>?)Rk|#;0}^e2>J$PgQ&D+#~>~uYel{o``zxzCIkV@ 
z%3c}aO?~&+(~zeqRV?144HoRWp8~8k~PM*oQ(%! zEjse!Ke!TU60g6bf}Rcmr;jF zX-k2E<#}DVzEg^!lWB0R)-U(s0JEzNVb5d*N?`;GNKc;g*QGaCH83coR<~wshuQAs zw+-<1oVvlh7>4~NEYo?Ie6X(vqVNIZ7;$?Ji@~OWtbbVJavObd)t7nyuJwAr4ncTz z1vaLoPL{uAS8dC-yY8!uw4J~y6IS@<&WD;Xb7f8{j$TgAg-^-%RV+vY0x9Q~0hXKK zg7Ci4QFMsSt>LDif2$o{$XT5BoJ`P1S|3u!(xo`VO%=OBI&39wO zbQSgX|(b?h1vc3&>Aq69LwFM($!6r5C}1xRdq5bgorTi0DZpLp+hA!#7RO~;ZO?f6iONS zzFuggQkyju%c)Pg6=rrDMOWpqFsQ}qzGXem&HdY4Gd4ZOvw(++|qXKJhV#@iM~B8`*JgHZ-7i8j{tR28JA4>};33Pz$w7m6@6i@_1@~Cmg zS%B6{@LbF7)i&bCrw3Ikk)Rr!saMWBJ2`a1B4%hIrd$2blngV=(Ze2+x=@Bw3rKYS zpnKADKiOiuCy4>4|W)^07C^UK`Sn%Wi#>HMZ>#sj7|Vg`@Sp!XxPKuk?T1x6Zo z|EtZ2JK?;prD=K@wp5v@xH4nAb!MJe4GXK6XSjEFN31Y&ba>1fkvO`FyI-`KKH?c_ z3SE`W`J{TyeH~Jt^;U`{zvD9jInYe-umHZn!Yy2OS$>W3a#3BTkud!WJI5^J564Pu zE+?sEo4!epqh0;{0JDg4wx^t%E;?3geuAHMq7328t*-f4&pXb2iuPfSPkeR+^7qfW zR~6Q~uW2}xuVXp0DeQNvo$YVjXPA~oc4+UvSPX2B=M< z(}%KMLz}B$8fr0HlA~U;pG0q9l(dTjaQW5mA1P6Qj+=1#13t@A3axgF%pVGgI;7}* z^^v#fo_SZ$y-8;(GqQHjmY6Q384V~T6P-_J+nRot9qo)&Wh39%1F33BZx+c2p@u}IM=0GFIVqIv zmT&WJ_rI8qaTNbLgGN|QCipqR;b*%RnJ5D@jeFTXL=3}Xjc$ppy2p&1D;_=vMl%9t zk!i2dL69PLe=LHZNFS#IpaU~5d3~6wj(>1QpZnGaM73OC`G|q)fPQeGQ}WgaoWLfX zN*6I3CXqL~zX;V38952=0me0$z<|h)Q|s5QQF4-x*2cGruR%Tt)EPbBu$q0*51+}% z&5-$#ekv*mi0md2N5}pkJIoITMP?~ai~9DxIQyL`eXH5YbD2Gd+mFukNOx-Gq&Adm?6OLE?HU3=)dK8xzY7L?P zjH-5o*&AAffi0gmAVIkxi+g@{KO5|4;U z$CJ3t3B=JQ9$`qoOA-wFJ?77Xf4n~5 zht?m%NBUq;*iP+2HZrb{a1gd7XmLcm_2sKvy+<=5!%KF=U!v35r<=6_0e`0c-hllX z_u9nlm_AFc1u~0FP8aOkH$662bn3&^K*7h5$YR=o2+)4vbQU%0$VoD2z84o0UG&|ah7YB|6Ud65KS;^+6_gag#Zpo9{d%L9 zT%?i{lJdeN5H=0JSaRna)e!2{3psZi{Z`syd7Z8}!z9y>oZpzTlS7EapjxocmV}q?lLK@P0A#4!@@kf zrj{`=30Kn<=*!y7j9!ihm=wES-dN^SR1@%`XcZ^GMt>@IBH+>p+V`Sod)t^HxS_1Q+c(k&5fsXGs42QZxqY{mmfZ5AF{PV>fZfGHD4bN)5 zYxhqXq1&cEru12e1%fewBN!Bt@u-wE15Fo4eU^@Sy^Dg8zt_$@JSS+KYyQF{(%P0< zRF#eNu?8`4ZdphnloMx6<+DDZ&oHce{t9d_E%OgqWN&mkA?LYl zvO3QYsrrae6?B4sPaz?7L1fTH)3StPfJ6;z+PPzrPvJtNUA!C8IsyJ6LQah;VXZ3I zeYPj!0HJ~>;6F-1C??(T`UjJXXqfIDf7V9{F-syOQ(=P~PTi>2QG6qp%= 
ziofbLUMQ}}_lt7F#BH}|XB5)&k`*om`L>!r3?Xm~s}b zIC1l_G*D(YS7SKIw29n^Ci%ep_%Ln4q5t~O+-3gfP4kU*Fu)Ic-!84xlVHp3sC-O? zn%D!Wm+zoJ1yk`5p3VF=UyP$}TOL=XhKj=)3ZQr~Y+? zS3RVt$;I{H;IXBH3pQ+S%VM<3Ygbeks`j2XDO?qrkR;u!IUiU4^V};C# z=d=f!I?utYpNvPeXxYed?*IYH5+;#Ayy*ZL>fj_X$`zk}h(lmHwfY#)B@R;~qx~UZ z;_{P%>FbcrR>qEPwbDUPg@L2Tt*mW1pHC@y3$#gMT2&%`o2lY3Ub9sdq4G^~pUhiV zB#{7ZMLHs$jEkwR^0ECDM;RL0p{7g^{Wl`{NFPj^1dZ_!x_~=ag&rlD?yY(Fxm0}+ zEfLh=`q|C5Zyu=}c^ld3C)pL3cG$c%mZ|+xeP>ji{B=CEEGWc9)JSdm3ul$tfX;M) z*yR|^`3jmjW_`8P;n&A^l#zPYl%ipLsT=yoIxtf>T5f)!5uM?}%;mlztQQ!cs0#U! zx-F#T&*0IXx_p$h6sE|b>wp)&;s$BqvFD^weAwc|uMg;&;*EZ@^R(k37LfIrb9}YP z6rX~+)tE$?_4rLh7KEnHKiU}m-EY3e_5)ukJx0av^H(svEv}4bX)SW@A)hJFiNwNh zN%lPXmE%CfFnU74UASf#J^W)OwZLS<+Viw}vb{yrNtBp<*m;1Z%KuY(JWN(c!Ti_| z&p+GHa)XA=5nw72pA!}kqM`l8SbB>DE`EAEBnIowW~XKKNZO;|3C|b3!x-?p_Yrj6 zNz$qM^->Q~@3E;*`lYV^qGox^7{a7prKmJrtSJ!>4GY97GI&UaM8t45>77Lk@!uGT z`xhDi3-$gRHo49EAv-(PJk-B2wmlngv%wDFvCPevM3PX}Qa2e)6BsMj<@=}q`F}!a zI{*n$kBC)_rTze|`ftQSenkcTo-CDDI--~N51~(w|8{AR1MfNAT!Zm2^W(q0hs~ca z$nKWxOW|`g>ZOP|0X8~|1lLGa9j~n$n&$E2=Gb)`v}XrL6RUf(^Cw-7tSbc!6G*O| zdv>8T>~IY+tEW7hz%anWW2FT%%2}D2Jn9Hxj}U=ULCfwvAK>0$B2(r4s|E(Uu!Dgk zZm0vz8oNKkuTz+k*ht;ySmwgZc=c9?3VUwL;TPI}o~}@LkE+c(G4{RKKZ4-Vz|2Np z$w2m)Vtri92sma2RCbs!a<=UA%#c%@-O`Ote=1=<@d+39n=n15QT}urahYHR7mR^c z1MeW6^#!R~1^2jxTX)XlnL6-$n;DsXwhO)d>Me&@R`g3UaiYAd(@8_b2_mz=en}xN zX{;TClms}#o48!jM7U4Maug7158D=0J0{RCuEVWf4#HI8?4^Yonr`vmPgS9V z)Hz=XE_lL=)Az_)bVPUif)(9vTRdwt8Pc_N?{nH ztrTsBm*)`*9*TwuR=;^AD4c2AY$8gn3hRJK9*2$=AFqE7ktSjkdjn3X$!%&0KAB0a zEY!8Ndu!`K;?lFjI{(glZ(`GdE5v=qr>Z6-M-pC=RsoSSH7 zt=3Hx^!HQE=w^K+<52OiJ|D);FBK6~`E;9ko)*_`u8>ZuMx3xGR7eEK6hdX^Ik+9U zYL{QtZq!*FZ5~d-N5p776J`hj3U+=?p8&3aHyllk{tsnErRGBwvY-a3@_u%(ZgWdE z=V7XR54TTG7U8#5HGn}c)uVPRXPZpj%`pVb@jB7a_v<6(`ReEvtx6T!Ori6L{_Ev# zwF=gKqNIzt0ZvA}WiRq~^O(=CSBq1(ik-~0&%rpLEsmQnz89CkTid}@&zaOQyIo5i zcTnjkMSCPhd7?_N^aohH=NB5*_%`{lrwY=AMw7Eg$pLe>+?gt0b%ytATio^2kxy)v zuS!o7za_ptXhF5OUCsW=a>_hvjh@h#^7MmO@dhdo51juYcXVwgD&sT>s~A>jl8NfP 
zw-H;_>>R82jdQ{(vSJ_*1wXS?M`;@_n~#r~ixZe{W~r2Nu(Lhp_LI11gg=Num@jqj zWwMM(h9}sTMl2pn$us2c^k~*uLMKD7wh#%lE{UZ2SGWZ`@v~5+%q%siZ+4~FJu9@= z(UwAb!})-Grp76H%3R#vinnEBu)@lujX9|SSL&==s1#yRc7P6Owx9+tG-b#<;M30y z$RGN(4m6!*D7w58dIFS>qJntUT0@>$wdO9O0eyK^GDaM4^V&=kVN(yzBcM>HnhwX^ z-YqHx-?C2tV7=a+(_{~)B_}1OcB0C|lozPcEL@FhGx*H~_5O^!Otpg(g-BmsU8@05P!LKe0Y?Peh1)VUF{85DF~uyY-IGLQ_5 zYuZE?Jm2Ox(L$K%T<_KbEnzPoq`)=?N3~G1Z$I$QZ1;6KIpBGzg{H_cI#oy=RZObqw)sMmEti+5at&hBCg>1ToDm!KS4&B94zx)R1dVx0XA zr~853DYvbkb8uS{x;$ACo2c`{gyXSVB2fQ1^Us>|R?YM5!b{TWy0#@OZi;iMyL(J@I)7@$#=V zmc@!J-UaBNjCczkz$_XZ)}IKtxNgl+Ty>=JfgBOMo)y-W7y~PG*Xw|R4%567_2~o@ zS)e5*78c-GCuojWvw;P)!!pe>Sj$VixIZo!tDIT!BsQfl*w;7s5}G+!Finaqw_JT^ z9~0l{xNeZc45s+rC2&0!JrLGKgkgq_1L(dc%Dv!O@3Np?C!XmmJ*E;+qR&Y9!r5Q; zOi*6z&OTB`!8hzlXIQ8#cfh;!!qeQhSWP+{j=B$|hEBhsZrNDrH1k&rsijsS7d1Eh zGbd(Gavv({Ch|rE8T+ubrJdzu{I*VqunYG^=%3#{v-IvzaF+AT>N$=w!<(vm>+1Qo z`Qm$9$~KJH-1nSY&|pz^|FL1?>17>qBrhs`a`np!pGyKk1Uli;GcLDIjRkp<7oLijEPsS|_A(3WPi{%dS z1xkxn>OxG5$LC|87jM6pTO*8uhIyK86G}PHZ#Se+zub+zuG5C1z1{m>uMMR%)q#Dd zER^uHYq-vWTIgQV=+suL-$j8N#}sGY@UC>yT8`Z%BnLnSduKf9EdlNSB0O^BDF zo}+7}l1tXj#h1>WgOf!Q5uYNV1PTE=C;$b)p4U4c?`H%PX*?0sTVhAgu3+&ZptJ72 zf?1o@&1J;C%HZxQVg~hmG?kV;UuP1o^5INj%=L!|nj+>C?Z_6K&l-Q0?0MT^R!&uH;yubL1p?PSAl(GU3_<5Wsr=63L=&!RE4&pOP+XXILyY4-;1DBZKcU8MR zXpc)rkf|F!z<$5uq_MBkqUUyKBo}I!$Eh@s?VvOO={880H_1$s@RRS$SxcKO*rp~z z{Av;^Jl7{wskD_G>(I^%)D?9|6=HJ2vrLC>q)?w%E?{Ri;HuKm`o1se`#!Dr_A|xh zlUMkdTIBp#)&PS{C7bI7#o1Fnm{zTl`@5DmOYGaXDziwGhr8y%Ex4|j^LzwOG`@!i zbSYg|)kpJ?vg^G;ZdS{@9p-8+vNk_GH&buyLj~q))`?PiI~N`OWUUdwHG+#*3F`y8 z-SwzC3aFeKYTH?nL>` z9Tl`0QJu=n`7JC3r_6rZm{61vIu`QITU8{j*n4XfZi*5msnp2n@@eoIVG}=_#JE_F z?LyKzF?SPkS9SiwIOVqOxn;Z>>(cY5Fp0>O(c`s~sh?1TEV}Nf>z$5LDeODXO2pl4 zMk&$-+)4+0ax(08R$-bRvog{Wt5c#jdnV-hgWnHuy1ss)H~LnvM=2(EXWM@^N)23Z z3uUf72Rko8&R)`6?JxLbv=Z2BJ3izg{<174O(f5CU*i{B6;-u)JOmmwQ5vRap6ult zz0lZTut+{RYw|dmi%dQt{5sg$L~#I6h%8hL^>w~Ku0-tlH9uYEyved~1jEav zVViKdp`u#H;6>0b2~{TzK&l>5k0b&QTu3_MJM@G}{{lO;8ueOOy|(>%O8( 
z(faBg``zR*&mLD2z?4kQ=9UBHAX^N2NNIkECaf1?h;|_>hkoADHs`?YsGaaVR| zJ9qB+6ud`_cMASFc=qnQ!JqG|B*vcrSr`3?>??0HQ7`yV+X~zN6ds=X{b#A+e_Mz6 zug)WXv%3|I-=|xsT)o8A^z`sD8N0i?bw`&r`Sx_}qk^x(gRSs7Q51;D!IhSPb2k3& z*FK%?JRu+a86HM7MZ|<;omN?2_CRlZr4vp-uEqwurluPz%i>lB{M7O1Y~ici zdZ#2;w`|!rCCPEjXKWQ~!QIOZUH&3v{*FSXs_&!Gm^E6TTbMr>_q4)Hz33}W`jjwh0=k(wmWrFma#-4 z8yiCbz`<-9i@2vHEQbwxY;Ug2R3aK4j1^g7l&Cnr_efV|K18$g>Ef}Ftq)xXi}S$B;3=XhlO}!66zm5CU~ry z=q;SbF0C{9?MZ>JWz?;xuU-;|49NAz*nYB2Mi8=!0ag2)^MarufEVImkr*IYw5nEX zxHij7Gt`w@%IZtE zrp70*eo&a>JC-EsmgBgt(Mrl>JQhCxg^}_)%1_PPLp-*$`xec0#Ork9{OE>JN_&qr%3=Zn@&LI~Db%cAhm^SNT9hglYi`ZK!nIL$i=bKcstHAs1D!) z)I2%v=90ENWi$qn=0Oc~Il1E7lTB+HT5+o z`uJQghsJQOMu6rr#WW+V)U}H>x*+iczTnlj6w9f-wn0^wNkaI_1@Ph_|JLPI$2%2+ z{LSblmtL`Tk<}Ux{Z(-Zx|dLC;c?5Y;?3tHO8_VHx%voh(9ex3tm>b!P#nuW2)C|s zI&LgerP9}lFVg7IR&j5&MUJYV6M@ff8|#)WhrfV(=E|t`zN8nq{Cx0%Ddp>GdLrIB z)yZSkbmC&pq#+4{{Z1FCJ+{!(%nE6919{pmZcv@cyE?8|$0O^oeWS@P?e78kcfp^> zwu-vc8M8=CHx9t2!(k)-b@>+&Qr!#Rl!ddxwr#W`o~YVEGN@w;Y-2#;GV@`_{f|-8 zl9tT**#hh1akLt<+#L*kSH4Ssw$Mw=hFNF6XyvP{ok;HO;uw_8kPPB?Ao%d}nGj(M z$4c69L_ry}pmoNDDX&sq|Ll=?{nLz?5+XVn+M!fJ;$WkPBHmYLy_`|qL&J|o=qJ2n&sz!LO=*&cXb=C1H zh9$auv`L(FYe{&*VIh$`Us2>$dX4K0F1q>^akxD znaHRP`AnuH?!hamBIat7)+q>Le>*Ce?zvLP!2^zACq*5}t4$+VVm$o$!a-3E+!@PS zT9~UAZTo>1HJ-N+O%d&_OJ}Uk3=f*9!p*zF1GUvohq$SUhP!7_^ZrT3Oi$^Yy<8Ww zY5<~peUMdpzn>fkCifO3Ub`uFfBOX7T5F;ou2|ZPgF%BiHGgg-K?nW#?tHq+&n|6P}jp1>ye&*m23I5*dU`Q&xJTiSyagrICymSAGO@(hU%G&6B~m|e&$>@jpxYql!; zifl3uW0};VaE!l=Up92#A$(|X5YoGkuqR+Ie{AX9Fqz29^$o@^?O#tiUPLe0%A(4A z`J_20e*OEoCgn(&Ye7fJn@CThPD$;?4B?{E?P9VLDEx3{EZG}WVL?<}Ss+;vmN)T1 zihhOlnS}rT!^3)sz>SN$9bfRBgj!stWb$NnHtE2Ldz}_-aRh;W`Civ~}&(Dh6zj((=e1rz5xeC7>{`{F9 zw;|W*y;8rz%#iU!4aQvT&=gvg;7Gu})zLix$B`7pW+(@J?A@&_0B=y8$UIJb8fYk% zVDBkYqemD6;mp7pU%f21T4g;849OHf`uQgNy&fCq3;3lqGO~8QDJe#z39hT6DwTfr zTv}zUmV=IAJ@1j)SaxgC6@Av(Yw^aVZ44jinx2dWXvBU@4Xkk~43ZJf>s@a0jA{#% zaegU`{}G{%H|$ROuKHS+aSpQlSkll|$szmrd!d3n7iee3v~=NU&hk)Q*-3?#`N2^} 
zC*M0KUy2SxZ`YyPIm$h25lFcZcI^}nr>OpR#I~4-T7UXOU`i8vO&av4-~~umBlhPLk>~g3Cb>6=sj-vK%5YpEYmB@W5^<>m&wUvLuSYe zc}zHT=)yCc%jbfkKr6PfMC^98;+I{{KqvESZbHNgnC*jwmGa@1nIS4P?owSVNj7=q zvj#!o+6|`?x2c8Gf-s+6Wdg5D7cptPYUbpFH86Xww~1h{p(H8Upwiolw!>Ky5vqJV zyohH8wu&X@%SC0H`PNb-g4lHKR-L$BLjh3dCsvbraZy*ocm8OC&Uc;~82w^lJz&=Xv{|A{ zHb%WE5ju?33g?~gfZ@s==0jV;f@HkjjBIJl5tcx`LtV8Cs)aIE6`R;Y9Z#e93^Kc_ zy+dugFFt~5NvIj41s`5m#AmKqSyJm8hC10tfQ|lUgvZ40ECTnmj7wh_SNQ8mC1w3k zWfIC43}uH;wn|0H4sJ;t)Ls~uDy1K$gsP?XOic3r6j+0Y@xFwvbxbo<-EnbnBv#CZ zldqg+oz;(J%6_OOI;keJ5&T82VE=C8bbMQz7TkL`fC)V0xuk(x(M7}ws>=_Wfvkdc zQki0m-VhNcx|9+V($)`dEa3n$Qs87%DHhue!2&aaMHpay83O}}CCJwZ-+WB-jA*5J zyU?(uogO=83PjUIZcu(wTdWMYzMTB>@akKJFC`cDCsEmcCBG|Y{~UT1g~hf2<_&}D z8|5L1!3al><5{7mH}e^b#GLx2<#oE+kP%}@fD904x*U`qYkUc)@)r75qQ^Q!3XTgP zX534)>V7H+?`QFjCSxR1zBH&EHGFFn(X|j@oa=&gvXG&nLiWq$EUT0S&h4`jO2qMj z42>B6ulmc6nMVu9@TeF1b5?eq^KtJylza?P!7o7Jr_93pJc8BomutO**z>s6)i3wL z1ht@CK0D-@GeDgR^EN|WD&CBO^uabZ-oS9B`I1wO?~<=0D`H0LiA>3VB5>~q&-uiL z9sp+~wJp)rJPi6AEQlOuRN~~VS!Q_mhm}dQm$fF;qS}6{p#l zK&}9Sb$7M5Fe(|wI#7@3J=*tK*B2%}02B!mq9*dnT}8ay!77|Nyh#h91BLU{xzDPk z)hW${<+rx@lJbKh(cv2^iXofUD_hFAFcy!a>h=texW!WCgu_dEF<+7>!&n}UsAHH~ zM&{&3|JeKEo&M6EvlY!$r(MG=d}PAq-8e~Ky-m{1-Fn8H(Xeg>LB+~2p6u#{Lw%0R zJ#yNG!O3{wDBQmaVj&3eQj}9HZF8Ao7r(v}QAAJtmAr^)nG+o~ox?YRg zIi;u0gxcsAZ7eYk)rnqu@+I<2Z&ZV{&EH-xxf%=B2+L$BT$ejMJy)0rD$H1`Da^2* z&AT*ctX4_guZ&Py^{NwnczKt+-ML3K*TOe-!AkJGrD}4ud3R>6glfOEBKoYsyJfrF^S`KH3EHys)t9gGahJ&dv4-=-`PXg8hu{vH+y^2Nw%n&@%hUs z!)PQ|$hyl+TIj04MJevkIXT{NYcwnf3k)1$F;FvnqB=-abV=gSs`u-yovg(G5mQNq zM~|Kl^_$gCIYTO5i`hWYoSK&Q&EK^FDgbE8QFQVuPFv-e%F*)p#aqjax5>){F-$xF z^KobO#agDwp@NdqR|3Cl1fc`~e(V1#IMmGqn5}k!U*Tz1jPqAHu{yW^E<@Dm1j2j1 z(ji-8PywYT$XbO%HO9blN2oD*$qRI#LHF;11|ev#-rTzS72lO)C_&pRJvn%i+Ulc_ zfszD>fcwEyypH&f{#dR~$cX{y*hYUQmZhQsE`&YEic`)3&CNLH--;-acWm_7Br5&x zefg~y=|GFT^q>RSk5w&w zOd?tyR%2}_Te)v55B5wL&feV;XdC&LFf&BFmdZS^eeT@tP*aRg7O)nZ29Ec0BI6cL z`eUedW{NZs6II_VrnaptcDFk=x5gw+Mq>uD=WgTDVs9`2WsGc$@#!6PzikimYHa2 
zVK^`*Q5oV|dhgHR!kPagMP6oEc9++OK%k+fl+zC9JF?l}2QlovPq~agYwj}y zNNI9|cndc$WMluO3-jT)5~H$Qv^c?|&#*qVgfEZK(-GB%byk{^Ojzbt znBxH`C&gDydf{yRr0YO^-Tk!Se5|u-rCNxYXO}dkT=jZkor55IY|v!$7W^+6bBm)l zdR}j>SGl*L=`59#XC(>xvH5Yb8Pw~`&sJzfy)S9q#`H8y@XIud+Z@6Q zy3&5r!PEKaLJEDi?(HhePz8^=xBs|=nt=g9rn#N|k)R0who>aOq&S4RjWnL=qWX4h z*qT1*z~9b99>Vh5E|5HT*a!ckwScDuN61@awphqRS4Ce?{un-n1j!R57Zk|)XTfVU zr17F74?=0YL3)H4^4t{U;RB=_;th*GQh$cL3x}kuZ#7wxC+c%Gym)`-cC~GI|FDD8 z)b@PtPJkc~d8!*p=ovm&wbf(|1}$E93bHX6|4-}V6-EJq3SHa!;Fh~9D;~8D5wbewRP;3v3$28F!FWydLsdPqtvUlZKYZ$jDHt|7Let2j7jLESg8uR|I?AD0&?!-+7xO1aC$pvX}I6*Q~``8jAPp`EOIR2t%aSg-u1+hVw_hIK}#n36_&3yUD!%%UV3$sNIsU{)(-EzwSbAFi9q8S&QrU zq_)=~nEzLXx+&s2g;>Eylzt@b|#idjm1*@ypYQqh}JL1J|gu@Bkcoza61y z+87-;u8RjQZMh2fy<8_t;WWV#Ih!&$;#r?>bkjXT0bF>KL!NE?IKl+icT7N5sdwKo z?$^HbpvUcX#z?URJOd~7dVK~qM^Z^7i_ZEaQjk~l2L`LJI0?ICB7oG8!UFgL}+F7w?+a@&@im-_-qZ66a( zn^04>SVORM(*?Ojg^~>&*xA@L2sg+H760)xDyVu3EkDEWIzxW z3F_BmT#MB}#Ekg^TEnuk{zv85i5_d6MGb1c%0{ZFPa=1HGrO2+(*6dR9)YqnLB|Jp zM65e~h#%z?4(|Rr(cZ#pMonxo$XY@rZ#pT#uvTr>7Ci}#t3iV^bGoocjPK2%>`a9z zuE@a;^bBDQ#lxi+e?4h1V14ZCb)XC-=Tt&$SvkdYS%0@tUEHP>7gu_l2{zI8Sn~Y& zhb`8c+}E9hzpsCAv|O|k(Kfv|Su1o5$7cK+WE}}up;2j_IT%B6GiWryo9pi2F4wmT zLKy->vzMupeq)8~_Z;X`nY0F2Vf2_+j3CjgO}d#%b3$rC=N@wr*?2W!{=b3yufgSt z-xt8$7JV2OMQ6*5(M4_!Ix?I!{=-y@0{g!t11(~1F8%J${kZmHl|)fNOK-WwCEKt` zF*GofC7j^>6oU3kDCvSOZ{f{{U6d&JdKOq9ri&kekfNu@&NFlMP8OZvBvl`KpLnjv zYYHRWn<+~0(1FC>MiR;zkM6nhTdOH=+8Ok;u|PQYzbFFnmJJ0uzP=KttFoTvddb3~ z7W<3|{?p;d;iJeu#t$C;OvJdRMuklw!>i01J}dGc$l&Z?e;7-J4H#1CuxCw1cHSF3 z>-hhHV81Ww{6$6y<>3E`#FB{H!pYkt*$(*B_OqkjKn8w6{_GPbc+%b%x9vQqv1o$HKoMNRDLHUBfqf zG%Y>XDgSMT`4sizVX5eUCmAw-C*I)^632Dir|5Oue|hXW$}G}rF4cj6MvXqEi&a#c z7EayAtNhP7ijc2A_}{V&tVflc!9C8FEx1^K?`f)0!4&N9Rq5gXZ$)yu{2jK8!&>5H zDN*E04@iLUeU)0ek39x|LM=l`u4|gH8xy2n7yYv=FCCFTL&M*w{^5;fH-)QEfDCE` zhr2tm$*)S}j_3Se-EqFCUd3x1z`axbB^drJF%cE7pa7Av`up1Fe^MSX3h6E6g0-?2 zya0Xr@bAwYF~R~5RVA5JoeT{7V8?FCOv#I}` zEg}z}|5s{;e6RTNO0WL?pLZ!&NPsbUNKPLL{_ibk6?7nS>)BokA2|ZA<1r61?R$kM 
zJNExVXAu0w-_`UmQUGUBkeRJ9KNn4HaaXo+irnuj zF1H*nTErcW)8$(Y|cno6MLdtKedIgb_++V=BF{gLg<;g>D8Pq;#)RwO50|Jk~a zp*;$~vzn`OXkErjZU560N&6wcAnhw_zQ!kWj@TGQQX1u w!L6EvzkkD`yD1sJKPYUB`<{?XhBw3oz3dG6QynOB(uz_guZ;r#7m`o)%m4rY literal 0 HcmV?d00001 diff --git a/priv/.BUILD_images/Result.png b/priv/.BUILD_images/Result.png new file mode 100644 index 0000000000000000000000000000000000000000..891a61e52b1777af191b441a4f3587d7945e2344 GIT binary patch literal 24026 zcmc$GcT`hP@GpKnY)=gGkZU?vpY9ZTT|ukEyi19WMp?$Un%L5kzLul z{Jp+Ge%Z2eV-8J5W|5<+q@eF(xz%dDIn1 zcZ@oyvefj3LJ^o=$;Pw3x|!Fg#Z5hvo1kLgbjKHC{ihMrCp%etN+bo{w5 z0>|fvuV1_5@V)7h$Kjhp>%+H)b`zELQWv{M-mIk6{?}KoT=qVK6u*r#44k38fBx&I z3dq)x^6xb-zI?MB%&Iu`0nCpUkNyF!|0N}R#dl}e3vI4>r1T#&*1WYx+jGR8o?Uh3 zzulr-k*hs1+sKVd&9uyaFL2k)AeQRo4GawUu2cSP$^Qp5XanFUY?B2sOPx!SeN%E~ zBYQu+QYIv}H{2@yo-L_Io}7%Cr2mg1MRw)My0Do0Amy)~`3~?E`#*^+7sl49&C{A> zWc3sG_s!reALJbM0wUXrZ$!z9|^%_jN1Ys%LE6pti};qhiOckkW^dBRm8 zrDhv0m)mE982PS;@SVO*mhBbMi)KQ8FuMlX-Cxre@D<`sl}dtZqyyDF+@D^zZh6!9 z(3Ix_AGm|Mkdi(BX*0e*rnUwq9lCmwSiH5{C1^u*HU5(;b6Iq6@7D7EXh!2|zg(Q! 
zv2b|F?)p^)(`R@54$4jT=V1(E-=+qVBS=Q=hl~|zk_q$v%QRem2L_DeE#jgM{m z+Z-CX-25o>G|6iPX%5yn?|En=#~m_l*?+(|2V5RlJ~@MLUI}02;L3E&VN{=&c~xa( zS^)r2#oRIdxw`Q5I?7(te*uJ74_B8R-i^X%A-f&VWX6+)EXBh$F4Otj zUrygF0h{I1rfR(mGIQD}hDY1;*f1Xw5Ps8Y_ndGJ^kmt7AG6{K?nv83I=+@HO($-` z(>}ZHjMUodGPhag+$kzc1tZvW@zjxI1@khhO6=ws>I@^Vs@J|VX$C0~SVvrJp!&8J zEZlGM($u<&xmvn_4Wp(`v$JHEFG%MY#2C|3PgyVLEuoF7Wck@W&2ixYfDj-Fbv zgxZ zn|?JUnM3nqs7Q-9+xnFM9hwqiA6>+|YS(d!v?Yh+N9P3ji#(B)nVE)y$qLinVG_^4 z`Eop9O;+$W_-ZhCUH^1ub8398VFq_m2Re_ki(yjPY2S-V%gBA?ad#TeAAGgK(r+Ru zSdLlhIv}o5A7j{PGdk3Vsl$IWDV5l}za_sLS=4szJFqhBQu`{u#*pi$_!XWyjSR_J z`%fGSx5Zq<-t`YOw##6tIYO*(!?6#-GW|#DUBQ(%hAlqt125u8f2hVXNTVYAC=wA> z^q5LkY3Jrr5I0B9vEMX|VO$K*5#(~gCQp*Im}-?g-=jlmCkRWOB1mIz)*m!BlW!#e zowljf=dC|7hBYhE-(#zt=y@^MGYwplK{yu|rcM!m{$%cvhG8YJhAPwdsXdoFe6tYg zSkKofO)Rv{S%Z22tND$uI1kyk%<{g~@nUTJ$zQh!O&Zhwd|hY@-+f^puw`dGZ!Ep< zz%n)1q<>Y|GLW|}vesJEJ*YY+Mw*dXOO}@GUgirx#qLBnZu@)10NVWm zM9Ik&PB}1WUa3Y$l>%$>c87g?Q{s=?`cLfU)v|)Kg7$_<>hh=Gsn&J|@pxLkOluqM zm>GzZ98sorE#fylkS1E;Ipn!;585rmtCKlqKl9`r)srX00q5jM$lh-meo(E=n@1Wy z)n^hwWG9?lauK1`=_rz)aDOv=B`&_q?Fn zF#UIzO)K=(3nDo=?5d1yXHxMRKq-KAOsY%#!M`k)lvb&ZApm5qlI*~7UOhSv; z2OYNMgXE<|0wlHq7jJR}@2PSzNhQkB%cM;1?`Un1-^qr?$pV?kDf8XvAERj8L;5E%vkF<&-^X)6Zw+NxohC6$S7@i~v_WL%C zPls|C>j5lBZ{8OqX13Cz6hfA!Ey`}ZZg+N*>QkT7v8+zk3b5rZGV%tt+bnQSw~_87 zvB{|z^vEC9B|rL!o)92=ad0CezN|9X#jCv^t<_eaL791~q;}GsSy^x}h*|j48Tl1q zcME`MHVWWl)i9E2{LZmSJuTO$^;XQ~@saaApj_H~EymMHIySj@M)1MY4Th5nUegiV z7^eQ=HPnS@QVhL9OZeS9-S4bc>%^>}G%kd1_wm9@C+X#_RzTQ;x?!72%95Uvnftym z=4|})!0oKAmVp@?oRo(FO*dLn9%*m`wWkj=7ryGPmY~VP)O@OLJ{PMz!>y?cnSgLf zd&0Tm0&x(2g|G2tR%Gwjb41-8$txKphre0yx5)0cawtS=EETm_dfC_*Eh)WAX*g(> zi1RbHE2H;~YbJ_9#w+5@Tq}+oF=4c_`}agIEfaT`CqAdtljR@l70GbIYPI6DE@f`r zp`$TYU$mLu=gMUKSZ8M5U zRCcAdUzo=_Ciei@j3tIEEKK7}<#rPg5aK7|O(5k9_j?KPW*Bd|Z_TYi?I#DGo&*;u zpFJBo0|S+kUq``pF=wtvvbe~l!YI2#KW+Nv>Ne-?Q1`;~0S6C~gV_8*{IJtVmdGSV zxbj`k;kp} zzuGpXe(%t%mOE`mSqQcX7i z^2)Xp!`jQE9$AMEI$2UPW&Y~venPx-CIY;?K+4Hj(w+QQ!V9+yRo$G 
znC|l-LeH>fZoaB$OkhuTvO(atB>|FNA3Q_Q)SojJSG16_9qxCklSSw%;iB6Bl4 zF`k`lYnuFqA#VrKEg*kh7BmBM?c`9A47 zfqKqxEg4eH%;DaWzNzn*KFW;3J^)yD>pGdXw0(@Zg>IwztUJkOr^-9^gd?~qvf&^& z%8<%uT9g`0y>K$bCH3yiyzSAeNwbZkXoeFjD;zP)FX|RsU}BK<(0EIuiv~qjApG^< zGcS6{x10w3V6!Q~>H3{(TXVrthb(eS5dKTg^}8)@-E4j68Fc0SyP!ykKj=b>ecv4g zPr-@;=hex$!p$>Zy;7fS6f7@GAnE2<@YyDKPpjt8850(J1F&#-yyS^%#@qivTs%dY zU1(W%%b~Q`rb+u4_ed5XrOE4wy$^rAx@$vdUO*#uaS(ELVFPxibe&Z#0{JJ%?TEhm zfshy~F_xEpa_FURBti`?jm?65esS;ND7;O@UZqQL z+5#HVN0qf#M@RNzD_hk4U`hfu``3g1PY3xZ{nEUT`v7NfhmOB~v)GQL`b>YEE@at_ zOZwf|ze6i#$D0jzA4Qu;*ff*cJI>tI{w0;04UA|=V#%=}#*WUPbIAOwj!U<6=b1Q^ z-O?=re%Squ_XdQINYTCpt@lc=T=)XDfUn-4HW|JCybm^hz#uQN4_s=WVqBjsI-r;i>qa$$0*2oCnr8TifMC=uV*L_N>Z!iFfte{MU__wia2$i0c!Q%>rWANIp(9N1$g*v%Fx@qWf+(ky zWMDAX$>x`j@UxHE?<=d)C7QV=;B7D07|Jt77T?jA0{NonCc1|da8rX?t;pOac}&WV zld3;>u8us$w)3|ihp=m;a%t?hSI*_-_j&!eFDPcd})o zuPfC_v)Dwj4nQ=bN#mWwk`tSr+fVav`hou2#m-F!xTec6h3ih65lkwZ*C z!*=N6naA`!eb7U3K$RMGqkg*GQ2$CCr4I}k$FgDi(mM*gXwh=?72Lg>$S2FqV-JER zy#fsT+Ok_rKT-UFa8z9CiNmG|r1PfdKZ~V%o6?HVFy?SEgZp}<1yo|vkiBjhTc3w} z)=qmF?XNBO3+ePfpKA5Ab7shLVT;zV1moZ#18H&4ZUX}&;1G6Q{P=ygzP>q3hCzK+ z*E!Yq`K2%|hGwv+4;{m6bW#tjP znLKjoxck3Bgh@Rgtl$#)eq@X>G~4#^^rJI`-5n9n#6-o^;roLocZQV6iE(}Y`eS^R3F(6@H6r;4-Z{Mz7-`4@9I`7( zSEd+DGeJ&?r?)$_2dx=%lLtlF;duH7=qL0|;0)`qS;$#wjuhrVSlRL3uUW5^K_O@S zi;h%^{=^DGY1}P|*(ld#74NN5Ppf;0>!sE4C{W#(={fhZGkKHF{^@6I0WXMD%-Y^v z8jaA_%)e_ga%@_N-JAlkw9JR;#Csu7Wou_(Pt<+B$L&FhJ#ffbQ9RV{WU!KV;#0iz zvWS%948(dUwZs8edb1R`op`XA?qN5G$Xq*-Rq?_o$oKs2OdS+B z^e#iGZy3$%aaU-}7-lHyXF;#l2&=s9!Ic(YCTz_O8&tPAG>kD1WZ*%gP1fB-gAG9u zdf+pj33MiKKsP31srcQr6lU_%THm#h_0UCYT=`o8!nBavaa~;T3 z0jS7Ukz-#WlkvLC6$eWkS~{nq;z)IAVpqunFn!POrpPZt=yN-?=5<-|#{Bd^qO6_~ z#dQV#QIku>lrq%v*m%m78JaX(MRN@Na1|?KY!=mP$QO2p(r%qOMaH=?HZQz+37&RH zL0;O%fvTIZyP-vzuXCD2G>T(rhhiWzxoq1#5=4%h`V*TeJz*iLJ5nxvX|FU|0JcBJ zdVC#}AxrG9r2t35W<_Uqlj~Ttg!91$7%q`jABXW^#Ru?J6+%?OxFIlZT^M}8P^%}E z2knF82DD1ecomin9rC)J4lR{4G(a;B+|!>q_RdRHo!WonoT!6Zt-rY%vYxQ$fGbB{ 
z3P;RM&8*QSfm~7)%oun+D8BB(Wqwf^kVjfJvMX+JD9^_K`qxYtPQqf?{07o|p@%Y& zj6;6}BR@M-@`$NW>ba%0oI(ZFO4m$l{cq!Sn=xn;8G)~hk`y-`(%`ua&GlOuIveU4 zmkzPXkjRARL=&9Hre2$Zj0gYvN_u4)+;*E5zFSsUaa(Wr`d?XzyTff)>JJ(x#hp*T zPHN1KwFf4RmuZ2J8^f7vQ+VL$rljl0F-kK#tBox`-F4%Ihlj_~ML;XxLw*1%&qF*; zN4W9rJV2*|;Ud~qdrwVt_-*_G*OhA%bk91`CiS+9 z@w+q;-&?%?cor%D!*WJZTN-}Aj`T0LI31NdZYSBCYj`2_@PkN_S?{4TN{iElX0lEL>LZAPF-+{jf_J15YO$1W%UqfGV z`X3yL3hklHYO;YZsv(#a-u=$6;MK5nmx6_}ZK0dND+2RaZKzF=%Xq5>1#&YicKj@i z@{3)CYr@$!(yVyxpmBQU&Q7vhm!P=UdMP{h*TIIF-`KOp!`y`%$HDV<<<8ujDA#SP zo!Vc42RcMb)*LVgN^^^i{#Eei+2`!4vjMACs?zHgt@b2uw|;-61megi1?-rz-J7t> zE;@14hGb#R4PaL1=}Nlm>XeSn>QOT3L(uumN{F3jZ^7WROUUq9@W=ywc4JCQtEXLY zW~}?Ld*Y3qe_7$OrNlcur;0)i2@qZ}FVV}AGGgO6U8KfV-&GLUdpfSHa|LyA%CtU7 zP(dNtV2wW*N+65wx_mk1#S6=|v={rHS|Zhl7+xs?N6Lw#zl{aza{J=oe# z-#ih-%D7ACz+pFEKtoKTzrQu_{i6GKQ+e3Z1R3mV(zsKpcGxtI5?_xsf|GN2Me2PZ|ro9YQ3 z$z#7ZlZ!Q~itK;;JFba09_Aa-wQ0?T*~2k&-Qn)b4#XvRqgSJ{DSi`OnYVz z15`BY=dT=#zS^T?KY8osZk*n1%o$d>e=T|kRmetzOSSSHhtAM)Nx2!P`AGo0xhQF# z4zVEgc14N@%^Ge}YZWcr@?gCfc*W4Ih6fL4<+-BHHGd#{$%p(LI$zyyyj~D_XEbJ9+lTrfiV#|frg@J4J;xoe?{LM(-xd1Ki(FXavy(rz2@>xGi zmo8`8A81LGgQPPEo+9-y;ekDlG`;TPw_R}Jx$wDqe6?|FwK@ZvV&F~ z$Dge2V^eRh1%o}|!-JQziQuubKf`vfFwoc~`23)8f5K>C#Csu_KY3J`ja3t`VbiGJ zK>93E=noqnnDYg%ts6^x;%qU_*Q(Nl1rTo8NLmbhEik!_iIovXJt8-7=hLKH2>38` z)Dp#)CG%o`%gA%LmucBsZ5ht4q%Um0$chi)&6(YinMieHit8=H5q8vqycuGAT!Vbpdk4?}W{Eb=8>&$*Zu*4j_KR6M%lQuteMWh@~$ zQ$b53QzZ_T_R<28_Ntb)BAeuZ<<%TUCk&$zX@73XN;%}VS$uWpg|hy+bKvVI17#43 zB9FhIzT;c zZ#4kF98_NpKvJ{4zO26;=?1Jc9Qg^GJ;_jXk>-Z#1F%Mm5k9EK$H-wveKOR`xWc*x z5FhC6QdLUe<(hciCCG%fnb~QNYd_-1KX!bJ z8f-G*s~*l69)5nJ(}5l9y`Jjd^_Csa6C3vbz(&lEOyIb4UB_1sm>Dw|xLSl`o*XJ4^)gxP|oM-eNgQvHU zck^e6GOC%UvKrp9Lgr&@@kP(`2#WjNi{2$0pYbKVgBEKwv2*$H3**l3(=vc^N3Yk? 
z_YeB2+Kdkr*SyCWa`RaFJdP=SX1gDJpF*9ag1!ZohvBgOa#9dEDsgsxx=y9|A^|#g z70ijsRA(8?);nuceM5_m_9BVzmON4-JLz)Nv<8PpPH)+$ac;Ia?Y@~gEa$Q{LM3V4 zx0;zcoZ%!6zJl=wYhoOU{rNg1p;qU(Jyyp#(7{83bYeKQjYz^^U{?8w54<^;{6B9rB36R zTE;%3hVp~*igc2#s8{3h9;*vgsN}3nIfS3w`4ibc`$|a1Z-o75F^j~hrd<|7@q^{H zcHjir>`7z)(?1ffW`@$^0;fv|VqaD2oVZV-z$(Rai;>MqzVS1$P`k!?{*JVHcX^93 zvFI#x7SPrp+@3gp>x6}{s;e<_t6^G8yKPT$QlwvHyf>8j&+~)9B9ceHKfiH~9d43u z47+7qz}Q?Z#Hk+e9khBzE#8;fByz9=JNm@eD)@y@Js8FpbY38^=QtY)u{}KUVvsxo zRw!0}sO<)b09l(|3I;^nWS!^q;OkyWq9X1~%3@*(N3m=Gj-~?!Xk9-~`xV zUko@oZxm_uWMZMc9)P82t;;G0y1X9xXWC!N-SzbJ9aI|T<9mip7E9=EbvY+zI6OaE z;I4jR#q?TEp#2N&#u&0y%=ObNQGo0Gw1BM`exJ#;2xK0_TVdIl#+N|~;R`yt8F+Lc zIr;}xh|)~gB+b_)1!!)cEeYr>>uH9CNSV(&Pl_6a9-s{C&EIx3^pA|*Jf?4c?2(=A zB_OySY%v#E0}762rBBl!?T5G6fA`;58wwg81KIHx4ydS%)z5|N%hFmF)OQgS(lt!UTtquqU-`*?!BsdFY@Hw8e6MgJQUh z?aG%68K@O*yWn7F*(oFQ?CfJ!28$9*d83iQbNk-7L5*$kcokgx=8J z_-tCGA2*WX!2{f$nUu9HaZwpB)<_p~FdncLcBwyf-Ji{}5JU0$sk9#-Dq7*3C#wo6 znG7+@WeY__v5GwK5nLCw#WoF6jOJql7)cr9brX3WLi6)D?Y)09uw*w{s|-Pm0(@O| zVvM-UkSxGU=i+sHYqx6!$Rp5f+Q_5;SlvaOp7RPnb98lD32@@k?eD!69UX6Ubn!7! zv}J{qZjOna_6~Y4N7{V2t9$xgaoO|m4oW%TgBRSTpwN1<@*=6x_$gtVi(_1>?7Ex>q;r zGMq;QIEZP1afEpaqp45hBdFG=h-lY23kAs5b`tpolhv%#Dm<06Qs`fD5E!T1d}1fP zF^XsJqg&4H_YWSIDzuENwJagtF*#4vMFpI484ki^vV+LydK_N0e_aWf(%`ECeHPv( z@XxB?i{zYAB*S;rz7rc5`CE8{dwq2HN?VTr54IcYwi6fCCIt3=)xbv4!otE6UsIOP zxKb$({Fk^!mu+xw4edQR7rRvZU|LQy{H(=*=l(Or?*qP@Dj!c@5OdoK5=-LuAH zjpr!yGFx6RZ~Dz1-wZ3cov)S8>?4c^@TOa^O1#@EqC*sh5WVUy_4fZ7HJmF z>uO&2VjT^nsl9D*62R*i%XAF7B<&K~U^}4;?BFF->!Orj&<`bs_ToP*Cq{EFEsY#o z&zD;Wvgd0bAp8!v zJrs_g?|hyWALk3bGDkYTy}#7&S~R700Zctt8T*N)>ti^na@kg9YS=2QGxHlM_|YV} z{mqTP|JhMEP{SYr(`NurOP*_DnLg;Y61nI}g(hvJv@KwBa-&dQUcNmPtADB!>ayQG zvq+jUOKB9?Ws=r2of&dJ3D6(nk%As(Y|vlynQnsQJ!XTQgF@Gnwk@TJKo1lYR#p`? 
z9+Sq{?0OD!XX}VUz4UUkH|xO;s|(|Y7Xp;=J#R-(GJ@j4xPW^kL3%gX*}AFz34^2> zq^z%V*gr7dzr(7U(YLM!){Ay*sM{9B3p{<9MJcF!Fo4D{dw2A6ACCTX3(nCrIl@#g zbsJe2hf}dtlaMnzX^q(nH>mOQ6Vsn@XO7OvcC+f-*z!U=hh`3_p~hf(`zx8+|ttXN(oTV$uorS)_kYA~k#bW+n(&CnY%nVkAthPV=#Y#s=hn=fc|sz_{{C;R zy2&rcT6J>%(xU_4E9FPmacB8*p1TJ*N`$PO+W@IWt-7#yhXE_qkL#iFX4b;^R7w5{ zl6OMkes2+CnkaX^DcqbGRFV=yP(RxUJI>>zv{|W2+%3-_O(Jc7PRfbxvj;o5GD+Cs z{7gU`OFI#@zD0AB(gFh~(e|kOzYjFdl4md88M*qs#a;7r>~c5>;<2}N~(BlV($rV{Q7EnForhSMhI?k2lqXcw=wM( zx@!`2H72-p{FDLnfuS3VIUYPLp&ISV?7F|>@`8P+H2&WZ;644u|8>vozc<7FZ<}iW z8^IS~d%2JB-vffW_aL}*aaVF^CykN$@0(Y9skY4|7+?B|esnD7)rS9mACDC*{#tVL z|7YtBFpep@q`3GeQ`XbO^5@wh%f0FHEWPGdYEidJ-Wf!4$fR=er6@=W-zxUW^9ISN z_7UNoTf&yh2IoYxm!E1m#+2NgBEg`52&`!LK=+eAgjl@$UX%>+HGD8PGQm#gx746p zWPW~xdy#rR!)-@HD#tb+y$qlcD6pPmB(zwd8BkjcS?V57flMUSIQrx*s_y=9TK5f2 z6=$yi#H1i(PHsPe-!l6m4x5hHSeFo5Sp8^-(;kM2jR$-EQ>l}p@V)N)`0m&%-$E#( z#z|LN6I5w*b5z7Bq*9Tu@C+=ad&&ZMF8`gyc$~;St*+TvIDTB3C1P_uJc-_SBSYiU zv$SK;$GtNg)D^9-X=4fp3{yq?;yr#Dt2Zmo=$)QZ4+@oiKc}%3S^5o6oaYUXdP((< zHDFql5&DF8Ov?iMDNLhmD7*8mgud*yU9$H1qIaKi7IbC;! z-hv3rEVjLAF48wJuz#)e;V_H3^VnEPe=$R%bTE0$YKA5xHLv6myt}UYV-%c5H(twJ zGMevsd5|q1XMW^Sk=|(=z#eERx6D6KV8TC~`%lqsr!&{MZ4&j>CF@i?ma&cGW>!JOjF{m%4HZA2ZWxYwo@!#69%)zuG)2Z2Bl(Y=4tX5X%hb&baMKZ zfnJb%mxi9Dg73R+?+J!lRvJ3rn2ccnhI=fdKm4yimBigg-Bu_eijb`^ol0MuXmE6F zUP;Qq%8+;54PTMT%4g8jx1MKR1Qk6)q-d?9s%#IQi6$zhI7y;XF-%sQ)Ws z#_Q1(a7v6xil=5Ae7q)H^a=LS;@gFpMAO5*(e4sM*0I37uazHfvG3>Kaa798QUA=g-X9){=0oY#h38E)YftNQF4r$Q zqwb>$mJ=yLuAC2sfx4?Hi2F7RfNRUN2vjfH1M$Z;= zB)#yIj0s#v5Y1|wa+|^m6XWlvt~IcUD9(d4 zJ@(tyynNEKOCUI;RJBa7h^^gQod$a2mALg+G5mx1o62XB2|}9&SAugb^B_3h!M(<9 zm9*C3MMCx~X_oXjEW~9dQvkxOKTXyG_02SXcj= zv+r4p3T}Sy+slUDFYjgE$#auO@Vc%&Cuds;^7v}3E1|9LHTqm_IqN2(VGFJPaXSk7fYyBUroG}G$mV<-5oL5b{L_^&CnmS zLqW!LVJi0%Epu;>*jMN$5ycc+J&)A5n2eYr4OC>JSvEuU={>biwgA^)Ta|VBM+v6$ zBM7?zd{_sN&#rbcZLI1+cXP2ZETHSa%l@@nErv! 
zD1oOyo)O3baeo^q-bi3!Nm z%1n_l*azj77cEC!xyGc6EJj$b%yNfHtxtb^>4aSsaA#%KT7Ro&edv49$gs3{dGzKg|bMNe0%B9#ti8C`2-+hz#C4 zfDJz*KRWNagFM&HlRa7fZto50e(w)#*WrM>wZ1grXqv-C)$jyNtx^EcC&ODE)Weo>KC#5DkD`5=l*#3 zsk9fZ?|KHOo%NZzbozXNA~QhUzw@5e#gBdAPIa4~+E&|{|G>?CCoxnR98)kIra&X#E2J3`LA@wa1A8YkK@>M;pQ570o1ZClz!Hmy4JZ|?=J zFXQM+cJBUY%39OfGPBiYQW*Xn%1I#Sj+4XhIwJUqOecGv#7bx6@{*IQYes}> ztLTqK3JNtoHOt7PAkD#yDKLB+svBlLuIM44Q=@&yuq)ep->j7@={_eNUlHqC_ZX5n z(P6#w8#5!;psqjx(CK6`+rrfOZP9A?1TN-vs-T4Gegoh)$x@?MIV{RLO5e&!YMy?2 z$L*c;XqClS81fDc^`~V!3jw3n)~&M7aDoFh)>)Vj!C{dvZqqCn@tZ~}@4TshT%5{| zK{;0Vcdu?sVZ2;N#+_FHa6)pZkf7wM=*#xRqC@T=C0?~(Psjn{$=g1X{ietz&2!zX z4})$@k_(+1u&AdoG8!o_QTIjs2JA!Ks3CCCMKJYOm1#;PynMN$6qNakMu!k)D?XPa zi|Sdwg!2-EMm^yhKy@^t1=gE$--~qsm^=yeth6%?*}N%?BltjYlQ5tL|9fMua$B5C z2;V*7--QqjPK}gCXB>lt{m8^%@!SDm@9wJuupzRvU)i%g#oCKHpR{3G^OF~IrSeaenAkk}mu1Oe{9eK$ST8Hbcna{T*v z(CLrv4-SA=Kjhi&jQ}I*L)|4?8m1GEE~d-=S&90@+zPU#Zw_GJ7IA0&^EICTsEpdf zen^j{1mbbO${Ezh`fNRv{%Jm_w|{GN20HGjA7kz`%=lRWVl9x^@{y=1_;r020fZr5 zO@($kNpy(l$az^jUe;s;S{#pBV0ajv<&k6c{4w#-y{dj;R=BC7Q~DF>E53gmy;u63 zSt_0p44P{yXVg~DvihAyNwfW7L)_Qg{5-E;Ixl;JE6-(Ok~40VG&Q|vk77tRcaI<5 z@nMka@mWC+uP};>!Y6$na&lSt2WfIMeQC;>bobC!dCWx{w{IJ|(l|^AfIsAY9rZjo zsQug;xSJbU6F>Q!$`Jok#NuA>RsIt%r!0lQqLd^zi~BS@B9>1}XaM*+-_LF=@~(?R zY8fEUpf#hk;e1IKoh!?9?*RPSqu%dY#YBz|{^?um7`}yh?)Xw$QAk}xgBcU7+rB%N z*3|uDx%C=YMtm8Z(yHFAmcpA3j-nL)C0Sk-PdC<=U6a9Gd8FIZzL<+vlQaYuGdQhE z`JK!nzz+gp%Wo`$23~s)m_Bg9bw3(;ZsaE4lmRD>_gm-9?-Dv=0Un$SA`PAd!8klK zd;3-bg>v3U!QJxa2(Xz9wX+j_?=8e%gviwWgFpaKT{r!`y>ptaLM6(=fdXjI%BB&AUCJwYl?PShEIQxj_GvxyI*nvxfjJ*MXz2BPk3=5LJz3hm76I zFddH9`MZ?Vp+~SF>vxUcLXr<9&VWHV#oIW81DAVfGT1-|D0g1FayUx zP^guhRZIo}Zv>Z-NEe+IkxM{(_!A_H>*K(}dsFswMrW_uoAZWfkE9-Cje>Jqi?*v) zpN^r$)78%69rw$U-B&t8tD3vITmG3HjU&jdzxzES7h`+KcX->myI*S(Ho{LeIt?s7 zggmHk6-#H&U?)#sn#%v_gc+clIosLU`$rxiPTm&k>eh(|uj#CR4B&aZ2xWe@F2*)2 zJkE+#$V>jacKEoo0sdj%B4b0D-&FBd>QK~`%dVq)>Dfk?s6XOFU1lA2HnxwBwzD4T z+WCqOxIthYoeH?0&}{oP-tq-&Kukcb)qK2PBKvlNAOK|3j=I-8e<(8Gmm_*hr~g4y 
z#2{MZc!R2$S_zyr`NdDFUFb3Du=?x!TShef5|#2#et-;qRS8OhE*3|F6~1AaGV(N` z2+E6Fm?szOChZs8niIXD&7dPF?UJ9e-}TrkXoN}LS(+w{C0$va1`aM8s`E7)`ZeH8 z*wEWMWdTim0TI9U0p4C5%eniVM9=o1Md**E}Es}r9^-0MR>J2MB{`%C^v7>10Wi&`1TEyLI}I{fN#>KcZg z)t}zxa&q`mcf_kY10I7LF+*-(jm@%oI_}S-8BSydrPWF~&+fRsi{t5A7ffOAZlQjy zt8Jy1kl*gA`cp0e*}ABt*wj7!k1a#&I)h{c)BlT5%4cA^BbHCfZSilGs5bs9hk>DC z2z_kVH+-~eRa-{{zt(hS4rR?KYXI#@e5_Vu6J`IMsbW?GN zhGH@tG6s7&b<6J%Zs$2Xeo*Nd$2zCQHngt6Tgmo9xG0)rc91taXqa_~IW0NTu7w5F zy-Q~Z_TW2cX&1d~+Jy1H%x`_A@FW;B%XW~Wlq+4tr+%5E%G?moCh7i^*?*xxu#rK0 zIjdx&xUact(RDw$#Y{VV6V=>t-GFT%-^J)A091obFJz-Av+toux8I5NhI9@(;o=Df zNiMk>(uZS(*q>;MQL~-l{CVEWoScVt0aNh$jF{DZii0@uvB-Kajuj7;2)s6ly|8!&ff@@L&WM5PIG7M@gF+Av*-&P@;O)=z}Q1z3i@B6)PtLtkV;?AED;x9Ijhaky`MkJ z>@!m`nIQE79RnT)&bhuF(m#{RRab8Gk$x$1`P8t~`xn0V3P{lkH5DzK>=+ythi2dJ z7sIp!)6@pGxQp#lI$Ub)dvSNMi^}ad@lwc!cAVQk|Jui8xSR4jtEpVW)p|6eRy09H zJG^W=N3|;{{(PAm(kil-q+h~pX0nI0!d>?lS>w`o$#0IcRMgmJ)#~>)80!`f*LBpn zvYlE3Z$QasNu6m9wBnue`z*L2x>)uxG@VClYWHTqcUpo6P=0qRejDCA&aaq4OrIGP zh{eX4)~#z`qK~wf4=;}Z*D}=mdp)j5n~yQ|&Q$8}f;KGOLi=*kYkAL&$E{SFt1DKy z0_T*1xTy5U4i}T^yjt=u6QZ+e-`IEtdASg~Fga-zQH=684t@%({fB|(D27{uy&uQ4 zYYdpWvc1adxs&geERs{+>u8UtN0p-m_S}@?1rh?8EQ>0Cpb_S+GwGF4GFrlY0i){v zcx%5sza?`=(9UJ9@FU)z-EK#tX&F2Y?&mK|TQL$URb`bqA8Kzy{P+c>gPpE8&+NSN zCYbvv(Vu)fnSpJc&)9$e<*0PsQ6tr&MHgFXGG3Y5)J`nKw5S2~!qZuEc8BdoMX`-} zlUVP9(LAK?29bRJDaiR4TMKPbc&aNFA5z_fTX((H&XZ3ZH0r0cV1y4$p9 z`l@L*K*BMt;|Z1FV=w(y?5uDJ(J`K0562S3tK;cL8O7YKW}_S2XK!)&mT-$MZLwIl zVRj9|pe#Dx>sFDhme+7m2x-s&0FzB1LgW|9z9(%SlQ#{v!^v_oT?Vvo0F9%A6K+cpvzRb6r>3v}8CkKce%^QvYr-qfOFq^vm3=A3bK+4C%vADydV3(zukX$!YB~Ko zaeWO8X#PdD%i0wH1sU0A{r{N090CY^e3mx>mtz5UEhV7{Z49Ey-$qzyiWZu`qyo*ki8OnFv|ZO z-J!<;wA`Jf-tRP{S@F8wcK$KQkkwpOOglDXG~M`W=!=XVWBfy_|Iy8LM>Vx{d#()& zh+d>iyh;=4B|tEMA_ge|>C&4JI?{wlyMj_QfT8yw5Rl%b1@KZsSE@h&AwcLzhfsVc z-1@!s)_U)Ie|~?RbGUZxtY%_O$mm&zK(l!>4l!x^O?RO|rebkVq zW`AQ|qsvGmMibXycGxN39^CfTgV}0I!ntlAwo2O;8XXE9lGS^BGP-U9P*h;O3Q)m_ zPW<30__aJ9kM#Du!%BfezIMArf~{!5&MWFEnO5nwMWqL~i`@WPdC|7tqExidx5qu?Jcnr^++t2R{ry= 
z)6Jp(_$n3|(N9ET=brNlFmFf+d|e46nJT@(=3Mh`4Ezq#;-_uL_s-zXZtZ1SNZ;&Q za9KxgygRGC7?J^Uv#1PlZVjC^0=G2qt1%;|aMW|{p$2o|jWz(%b zCRF!)Zxx&7atm*K9|%R)b_KvcwQU{pU0{nCGaD~naCfC)q#rJe+*OhJ?|}myLc2Zm0Mt9i{tP9YD%11K-n;TT=M&_#kMt&KbfPKn-6PN~ z5^*c*Gpl-Pp87NW(i#ixSnJPy(EA=7nW*+G!OW#J4(3$Cy)dbVrO~eFVPg(|BaVoGS-dM(*pQ7 zFC$nXqm6M?h;n3Z8ZeReph2M=|JzyIfJM;a@gEV;{g+-5a>r6mz^kYMl_f?>Heu5h zU{)u|61Qyu2-2NoD=4!9Fgtyc_z(#L`1y&F>5!Ir6=>RSK4Ya~`{KETRg$Wy?&>J! zQA$s5MuDAKpFLlgEg_zD45joPoScO_luDjb0C{-Mj+u1@+bL9j4Bw7$lWwR*_nb!F zio0?KTLaLnbeY4mAuu_j{p!%?VfHTfI2Djq!$e_(<&48OS6g3c0Rgu)vCO5z zZW%%1^ES6NEi7|7?>4hN%%;}|`*Pjo7^$33*q=LuP_xzGYB#S}gIMCIPhIBF?te^u zF(|rpQ$(vSJubF(RFj$W=&qmIRMf{L4Y8lpfD#bHI?jzxS<05j7s|F z@oz?R49j2z?ek#-NL1M~7L0HBd{8uVH6q%N??T~GBGho0C?Zio;{hx5o+uEoOxc>v znj5#BSp?U_jt(A1N($&f&JJBTAF!wiNH;SYZCFZUJY3y5>x|%WZePS12hBLo)QCrR z;=6tL%4{2OOB+U(97P|m+I7G;z0$H^g#y&szq8`n?>ezlTJsy*()fW#Ut;f9+_$)4 zD0}?`euc{&?$yTvaSrnkDp=Ym>@3@TL4)kjMa2I_3I9>R7!2@xjV zCVxfSlOkFO3VJbUfT>9Pw8So7N+sBPm%qqvC$CI)+sK62&#YPSi2YaQVW)&5!L=Ib zUD`1nHPC7I*t!iPg2u-QMUY=#Owz%1WOz(Wf#pMj&v>U(hl)L_RC1Wz9CHh^N{|2W1ZmJ&1ynpTm$4n8XMJ0Q4Q}cUoO|BecE?#27#n)kSA-zuP4(xxdK)|zM6@gb=QJ}4u?Dc72K|-& zeWf*ZiyD7*!-TrTpv>cFEV*(tMegOoVNS1zWf;_*Ner3blyVqbVG+`?_Ihc2?!(jsuOm#z2S2rmK*H*-#fii2t4Nl8ZQD9%M{z7 z-8_sPsta;#cO4I zSK?B#9A=G;?RM+9HxhjmbXkyVK+%JCiW)?=;*V65HLO@_?95_Ea$&z%u|#yf!?oUe zz&C zVfDwf2VoxLNlVg5>1X>iQbK#P;E4SXtrm|iU(+UvHbwQ|kt+HI!?VV5h_!9zjp%|N zycMX?q}Gl=-asH@du2$HGwpf0$0Q&by_ymyxiTZK(c}o3WWnX3$*vU9EsO&@}uV zAQZ4HzC8&YDaq(c8~_JT{V9?%2z_@j+zYj4c5cUngej62i#|>TJN@z(l%h&+!wL_@vF210n4djio5<2{rg%N%T`w8yrA`l=(u!(%$=YQUG5%8$u2Kc3JJ=>qv=G(i0JrB z$F3i+^nX34;6&UxF04^i(aM^zccp23S{LFIzU(<;7{v`J8V|rGO+5ZjfYdkW3CyP4 zEFG}q-z-7Y?+6v_lXy!uSkwLikeFGAf50OHpAai(441a^%eNnu9wHlWi@5%3b<|pe zGcg_o1X}lpK)YD$T9<_x2G8f~XAC{vf~_VW=~tn8WfhHCtM5ARfG~h}skCYXix+jb~yJQarS?XU1 z7OisyqYeMnafktqgNRGc%5bQQfF3^WiX7iMg7F|N31^hn6 z+3`M|`7&F3<`R%OihTtzIY$mAhzMTfX9u!8mAL@KYtf;grJQd^q$o8WC}tW!4Fy5I zl~fWq+~lb3YBV$~-=~&yvmT_1yygdW*Tjzp2w?-$BCo3(_8q 
zIXCFkNk8_f%hmtM-+^PY!QWqAH903tIehD=q)#vGQdLfvcUn-}rOX?uzK`Ut!tBwv zIS^mFkk%mBpPfh}RR+lEhmP8=nQIX-^>#*|;EsgTV5rG2M`nu;$f2SJ+-O)&aYFD_ z(;_?FPdCQ!^u`y^^>P*eLtF7t1vW@F`yXRlNN$t`e{k>CG?0>ub0rC7V(pa|KI>0- z28DxUh+#$Ajq?kb%sQ@~t;+uQT8Un)=~@1+ml)o->&4bI^+p3mAXt2zAw6qtX4z!4 zTpy{R!RV9vc%fG60fRkb-y27}DmFgf!Kz=`CcPh5$Hvyzw&br3HvIs_^<}{o$Ia7C zoGyM4l0NL$gFFb{S;>yNS;V!oMfWABg%EVfwo}OiBd5_XSN>Wern*VZNf5OU^+@%j zohqkyEDu$aL+!oLDLZxCY&{b^q`BJ6H2<3E8aM6^N;234udmFoD5A_oquSsK?p{~& zf8AH*t^jZF;hhcHnsW$wmBPzlJ_0E(H=>twATu^L$EkV0bd808w4xdMR5W_%-N=_O zI~w12b49tjn#5iHlL)gR3=?;~!lhyD&nzDPG<%aoT5;0r3)3L!E}y3B8oQ*=qgSj~ zG4)ArW+A0Ls__+xJHf)6Px>Vbc|N{a;It$A&{eTR$boOHd<1@T)d{JK^}C-$UEf|e8NaY=@!^NeM^rCRi)yBHwasbnc%c?@r!Upa zM3Gr60(|s}6_ULqJXXc(nBGsBxOj%n6shmfV_L?0=QD7MA#;$TAm%!uXJR7Qpxm`k z`u>Kj&yy@);7jU049rN2P`eqPw}>|#(A2qnH=!RZF)@V2uer`MFmiIwQv=-gJ0G_R zGp=s&e$Vb_;4@eu7iMI0bAl9DGdOf?zk)(edRA|8ZdwJ7Y#vh`Xv3aGQE=Me)UA$U zHSy=ctgEfNpbs!7(WwgT%LrBX-Fvj2tQg0+ShSc(VzocnWTz;RWwh&bzg#V5NYL>9 zSj5Ihiy55rdf$hLi>LY#$-AG#|EBKUms~N+&SuZG33et9IhW~Is=@u1VwI}Nm2}so zzJ(T@;!x+;2s_M(Qhc2Q+UDm;g)9n>vqCv0#U~e=2hI`azs==uUuUu>|6GRdFs%4` z&FwX}y-CY#v{+*oseVybk%ppP!kG=-neFsEpU+Se*|&Ka#psm%59Hlm+gJ!XN-YX+ zczD~su6Q9{@NUOGJdDdX^ab6kk)8yxg!GDnOr?gPXQO^F@3}6PJA8ama~)zCVU;M zXVX{pwU<8m__BeBXD**_jDAZZQJkWcYb9~RW%DupiANNb%$KvP9^3V9l{aWmbW5HC zeV8hI;Uo%Htezw)9F4pO0etO|nCoR*!nL&|9fTK(;ehFY@UvoYnwE&KD?D~NJxI#4-0iAUEQP7pO{*h=}nJSOU5Vh7Z*QkQXY z|AeQ1+lfc+D_C_WKblPywr5yzTNN_q=ld}bFrG?G2IzL)cNaU#DX$u+cBbu?=)JqJ zQjkitfSX#(xjiqDnp_;YET-3+!(&H2cxb|I-7{O_-{cdVK#c*O1Z33eeIX>Thlg{Y zGL2(3jXP1`uW>_zrBU(QiUPY#+;ygzLp9-%cP-{z95OG9Ri2Z$Dn=)(uDX!4JXRHN;^N*&gMv!) 
zT`IG=XRfYZML+4C##${rTE2{4->Xcc;ra3|hUeEr*2-LjE2ZK|c13wPF* zj5R1Jv@N>cmyRZCw)iho^X`i-0y3^7&Oy@osk3=RDmkl%q%&!)7dnV%lXT+$K|1l( zMA3~X9`zSbI$^RN(r_>)Ck!L}l3P_65j3K;P(VlL6|QkX8BkAN_^{0Bo>YZR$>y8B zo%mse#yi*qH;%z(x}^G+mlb_o*^J92O_z%k5P;C;wj9I1r0Te>cu(U5*>%aeF5v4H z7<8T$Pl@#Y@`Wws-(b1W!ch9NKfH0etos9}+Umh&R$~UZqj>Im(2uS0Wqn5dj~HQu z0Q=|WFIVl!Je~p)OIYFcbk{Y%8|hg>YcGXI!02PM2*>ioz4lm&g9}8(nv0R-D%e+A zB-p>8lH)$_uir@u0-q*relJ7%!HQb-LR;_x^oaHCT@T<(gA&yN>9l;e6~By7s}I`*-i9uDBW1wX+8z`WczOw z<*M#xJ;i}sowgpIkn-5I85=IgJ6aTm$~Zev7iFVbGU+;+(ni-{@4Ck|)=UIdvu{PRLvpB*-t!Kc15i;ZsXF+a1W7!9GL+MZT(Pe%7 zN1*gpc;SZ3vk*0##=!GYejfM8zdaibl^9jXUgH^T*8e?ZGLlPdRN)N$lsjYRg#%hV z0qo0J!e5~p_if)LjR&7eHUls+rE6BNoKf*7vTKKR}1 z80`tC^!6w#!jDzQv(OgC%NX|f9d)9b;ZjDLu~ZkbZuoAWd!#>_E0kKUA8`K>cXo1gP=TNmP989E?)pI#U=Z@C=-3zXl5T0asoc)Jbh@^Z$^p z#c2PmbEhZ-a_ynQaH?e#BB|QdS=fNTPfkjFb3%ZXK+^DhVeRT-<|^gwy$uc&PAg-g z&ihAI-#Z>kZ>q@Hj_xGMA5~XXeCoC6fn@_QNM{O=1|RyX1Dum`%Xwo&kREaM3!PUm zw5>sOmz*-pwUrH5(?h4N+r{{<&R1H=FT literal 0 HcmV?d00001 diff --git a/priv/.BUILD_images/compose_up.png b/priv/.BUILD_images/compose_up.png new file mode 100644 index 0000000000000000000000000000000000000000..c439f5395d6c4c05e2fa5d205aa14452324c13b8 GIT binary patch literal 8136 zcmd71XH*kW`|b@k6a`d7q$r{&O$Z8zf)Kj&-h)AuDkai{(4rtHi1dyST4;hGMS7z2 z9zq19gAgE8385sUobWvByzjgIU*2`Toewj6_RN~S_dV<0ziVChL>U=8Wn<-KWnyAt z)7DZqVPZPL#kh7qdxmkIu}}sx8fG7pr>acfhWPP}&M9XVeHA9A?{ViT_NN*BbFa0m zeVCZo+m8?CZb*p(6O+I*ZFLp10PtqEor`VvUN?Fzj?2mzIPvx32N`j>bL7gTo6&sm?TsZl^m)S8&}`%G%D_N|w9iViB>m8P7Y;jO zV`i+9X~|xhe!N_!p3}+4!7^Pr@&D7R+Usa|c*2n_qrWY$JFLeepR^kGlYNBH_ujUG z&=eT}T9#Pf{o(dk7|SDx{$h@jqROCm1Gt)C<$mmk+F}5xD#E$&HR;}?kX9kPTow^4 zq_UR?;?@JBdhBCRXdA1^ zKf60an9GdZi$7rxfE$sWY#ilG1B77vggPljJx=bm+Qbn_EH=#omie9l>HO5+O2rKU zlQNOX6zi$tCb{n+y6!k`wzxjKKvLl%!qD!)7hxcK8gZrjE*4TLOJAU`yj(;P}Qfgv8UU~r1Mx(m-2bdFbHVtQPPPV*y z4;XXZZW0UM%#csnhUa@FEVt@&Z6qrB!7tInSp6mEI>`RvEE_2b;WC|Ga{{+Afl~4_ zwS-G2_p1$(kp5<&0?!A5{)y81yyK*mIA?92Uixgs$;plMkV 
z@&&KGEL)^!)oHENpn!~_^jZ|!PTK4XnY?zOX{SV?3vq~u8CK}env-_oz;6c`^w0}eA> zK=^Db@$qjsh{VULp_R88Of$u<+=7>GnkM`~KEnZxY5e`d!lhJGNp}KAU{sr1Ddp2I z2m9=OnL&GAkj#K{7Tf)w7u0#7QGJ*6kuQ5%dG#lZ$;+~?T$j+}pH*cqCm0}yKTpq1 zExd+_We>q-iHS>j`t&+%-F+>Aw&kDfRBl57W5s z;n#hS{5}HBX}0~vS%oPmeWbEsLb+MbJJW$-Si<*$3U(8}A1a{I^^|;-n$cfahsGqr z8FvA}e}73huMle)C)!fEhOb4Wt7*u9L0a1D_gw=u(w<8-FH$Q`@NXODWmBA8U7*;s zh-9*y+^ZrRZTKm2&t|}B~vV>48`&V;W zA~k{dcnn@a!Hp!wEj%?4qHIVc2xd#Vr>%AC}tdvT>m2*!<1L zXcwB5{l(0dPIokE5-FaX&^*}V-n&UtpX@=|DoJr=nRyo(F4`a^_ux^}1p9szcs8@L z8}vtJi-7p6n~w<2CxMi5Q=kRd+6Znl)Wd*hL5@guPRuV6H}dHPvX3~dmsn*}$su|* zdkA$Wq!Cw>Vstdb4>O|?Qsa!+CF3`agxt-m+>-~!fYw!gz|}i33HMu=O*|WVA7sg7 z+{pLZ-D|vxOksx0{k016~)k<4Z+9+sWa~6fIh*tj?_qqRU~9 zPJaRHWYv8h>-rLkvgqpYufG3p#86~ff-cd|!n4*b3cEVeKYr7O-q;He_(7>pkLwgB zzftz4g_HM`Opr-y#fV^an&)npN;WF%qzu>ON2l{>5b7c6%EwsSROrTq5kYDy*+&${ zu?Moa7Z!3|qsN~YkM9TkXXkf9(oik9g8k>KM*!Ip-WncxLwa)TGn&e_Q4?jce-K97 zlp|Lws|yQ>17EhE@OoXmqqlVf0xih3#5++B!Z-3lnBYq8bAn_=YOa=i@zzd}o^QHv z+P6{E>bz$(kClrlm?LbTndwpF&&X3`_%Oj{TKoe;gk!^oBw)vCJg@!^&wqTu%B=Gr znPy$+NoEc0R;)QGU?fEzmF|Cho2pw+D9+*A9xnnaA8mL%p+6n+>ws``{b9c;K-G65 z^}aI^doh$CLb$%2^Nuew;i=5!_=si2tq&2wad_?h zKdsCiG@Znj`V-F=U6zFNMoT#dg6H@zd)VwdpD>2ePB`nn<+eP$Kp%F$J!0=&+`a>K z`a(P`ka2NC@(|j3|DB8a<)=e?WaslC^pNZwK8Ay|5QCN>+d2@Neu@Jxm*s9Zb^U4usU_ybe0S2_);WO5!r(hHkBer3;XDbV%G2+mQZ=1*=^;bowiB`f}@q_f`S zNPXQ|-Z#b%-p`Vhsq}%e2c>VSdXwW)9T06Dzu>S3Np<7VDs|kF4e)P7mRFa3CNbm@)n#Oyw5$7zNiM z7@EVTy*b{GD(!4aHDot|!?{7U#_)u1*&5DA$`ty~_25p(q~G-!UEikvPCr5bDqg!$ zsKw?TadZ9Y6NS?9Be2pvyc&JRR7jl5L_ypb$DM!cI+c`VW5++@+hBG%g3o$8!RG5l zi}qTC(rG`P>0E1&-sJ81bd+8?+syq?^uQXVWA{`I7FVDh916DWx9pjo{i^1h+ORvi zaC*7Y%{QNBU~=^2s>s1&%c(Y90pkAK`#-OubF9Z1L1%U_g7)mxqhCtOHIs44mK8+{Q_wtt^I%tF`OqAC z#=t$aYFbk{gQT<1f-bjhe()&_KjEZuO&vuiGp7NC`f#_V**O zj%zv^x20x2Y6{B*AFex5%D|aG`nBD-L!r$rbClz+3vX(d_YNT8c8_;+@SUixyIM0P z?j#@kpstuHTmRZlv|GxrR!Rb2W{h8dFpjeWppd0;Jr0JVl1Pl?;nPq0!L9qMCMUfy zRx&!{T}`nuh!&j}?2+D8Y9KDpVf_~km=kUcd1djrdL{0=ajaZhZ_t38jLk@KGWX+t 
zcQxx?Q~Kq{kD_>%BJvf2jOq2(_85{=8aeNCCKVun$JeZ&BW5>Iom6j*NF?1PyzHQ} zj(u0lfQ~uw;YGwxRm2-wmO>~qQ|oESDR2e=(=qg#+sSKHI4~)+GHq4|=5NNs#Pmpq zSXww^D)QTV$SHwOhIv&b*Nj19;HXNVsb{Q0U__h3KW#A?9(@86kaFVA|4Z6cX+%E5KONOR3DF87^UZ7iZ@1=A<)>wrGh4jpxI-r}OTa@lK_D@at2BKZ z#fdiQYX9{Nt)zH1+5p49J$8``Y5LMqyCmilK27Skfq7s&kIW8wYyI9EbPKe5CBe&U zQ((mH?srutf(luGPfw;ypR|kfncf}O?;AJ35Qom{&xqh5O)BAIfKgXpSM{O@Aj81j zRg@&H`;pds$z)WBN~kh@k+D0IvRy|Gsmi&+CA04)^ls>wJ-g}(x=~n7JHvnHj#%6$ z;NnS{G9=Sokp3i>{u;xmZ9SHM#*m)IRVWEB6Axc}Yazy4D?3;C9bsx#Q3gpT)Y;QB zQ`pB|=u6v}eRb3yulMapuTot&HlwJ6zqTNu$?P~eEFx?B$!^gwv30IC-M`Ltki$|y zE~!M9(o3qx8Af)e3$;ssa3O~iUOr+6QBMVUIZC~=WWf(!mk?N{pUI=3J65_MKe+{* zAdKIDj;yGz(u}B30s@e6VfkgH-pFK}F7=M!Vtr4sH2^awk1%}E*U;>Kl?#@)7c{-p z{jQd>D?Kt2MGwhS z=*mIGg2D4(GQ$KH299-Js0kw;ChbD{?MP2=8S<&l@Y*w1Qs&J`MiV zhI*=`!OXgWOU>n zIk>3aaGdCYep1l7E_QDcIU;f^l$)fxSlAyYLD#c6(oYg7V>fcF)~*YRyzKp7CzyW( zNF%R1(3ww|4MHBimr{^=tZtg)T4wV=uY@hI#VUWNQ8`}Bhheq@R^?c)^7=4U+In$Y zS?cNPHr&1flT&Y?4_r2%mBeg?yJx=grtDLSU!)f_?i%sd>eM)VRjC_gF5_MQ$}nRx zKg9mZ@oBuE23%v+P&pZ!FH1l6M5%CD*k&Ri1pJ}r`e=!Z31`#GtXZC-Xfi3G2i9HH z>#PzZ@%hj(xu~aB*P9YcNbAu~69Caq^@s4{X(&O#OO88l*JL#YY6FMrSV;H(8)L&oToC+x>b6iyY zEQt-5>6!#LSK4Y<7+$dGx@)+%Bc?wPUpW?~hqZ};mM`85gw&Tz1R5)R-6Z*|%cQni zeptPCjtwM}teD5t6WJ%NQK>>&{dXa|NybAmMSZcqDlK1sAE{k`BBJe+80v)R!w|(= zbf|VBI<)vC!G8qTJw~g!o&3>SQFBMg?agrgU*Ze8S0W~{m#b)ufAE*ncItX2=Nv$$ zvqRQs>uuPcn2*v%@~Dk!KA-2iW_G0d~mt$IRg^manLtMh7wf_BXLo{1m>c zS;kyO=+Xp$w-ah^^70M}U6?{_VI~eq%l#NP#9%4~Q8-EzH_bcWf zx!iKRlmD#Hh<}!x5}r82pIbVt*LQWxBD8lYCC;E2ElGdHX4MTVRWWZwZY`vu97uhi z^p%>kl{9h=cSpAw2^vJhdby*pYSkCUAKD0;lKQx>XyXZS0699Ter~n@C9B(38hfet zoXm%v^*k2!u4Rd1DXTTS>Q@b$h}n92F(B(xY`dm3<7O_)Bc4=NW)!QizME3ZcNRWq zEU@oZT}7o$x&(R8hM}2_U#=le9KWA^+EnJa{eMb`s&G**@@PTWPz$<~pb{wWzQD6X zJ$YrX%uFPt%=Qzs^@`RTC4>)08Xic#%*14H!X6RE!n>$ASFr!L%fNpAvE&UjP&rdg zmN95<)}cw0mg$>W8##6&scb_75@2nHRl!nUwud+Jvz?_ zbt4}srP$`Gx}XZr7zQ8O*d6@7R(TW=iY4&g%-Wu)*pb4-9FU(AQ;her2F*iC?<;ao zo-MY!*!!~RpH{jq=|MIBjSF>qybj*v7J`BXE(^@#7=n9fgH)kD$iNAE!+`%f3+VY1 
z*~+n5x21`3yI5j%OQ2r804+yp(sDtcd@h7QuEa|~5p77NT_sFG9NzF&tv0ZbV+f5M zy+k@0#RGff$%ugMcaIQ981LB@W;5aYNq?U=z?NrAI!-c2ED%qm}01`ExmnUr{#$NgG^& z{&YLycnCFOe#YmW<6m3?_s1MhH$;Ift$wIQ4YDy^3?MRchInEka7zeAl%#D;&?CQ} z^KE1U1a7voCtLu8-g;K2ppQ>MpWD{jE<`XDzt$y5WfKk{x^`eIQI8!f(HuEZ!ZCMi zDSqZk-qk35dJ*?(%XhO&fAI&z(VpEak~G%V2Ly=_s+nDp;37R$DKH=fOc9_$8`SME z{={J;9Y)eli6B&mbUNLC%viEk&^EGa{pt)HBt0AmFDx(S95iEJi3&B@37!v{D+E1O zT^J)g4s^$(I~t5cEDFXX^Nh+_@3i&S>%{dueMfPoD^TI{jVt)i)NN6BVku_)2!Ihs z#D3c@*g3yM zO{$QLUi3=hP}o=rPw6Uby$NmSyao*qm918V1yHP>Z3pWBg9xZK*j--9!@rnr;zW-l zxiQJ)bwVhVJT-jdJR{jPC<%4dxpH#6cgeEe{AQ={v<;Tk{B)@2e_5JAqjB}pDA3!v zWCQj{ZQ4F3Pp%k*pZIrAY3LxWBrr?v>e|T?PxfImm%AS4>r>)Bw~*?Kckx+4JuGT{)6#gVYx;-W6!=w{9Z;RZuq!1*F{_QiSHvZcLVlwxLmmmH5}RmIGq8 z9t6_jx1~m&$9MlCumD;3WSN;1yLw-Xbgl0(D0w+hBheXwuf!ZP?&tOKQWqHK!JAL@ zP>k!~F@KmI8pN#oKwZpS1ehUTHfA+@{~BH8_M5l7Ai)yhh`{cgWH3_l|HDYV3rmrS2*a}6+6dNJxrJ0@)Pfo~EyMzHcg{%ekVYO2 ztcpA@wtPw*2>sQW$IBRTSU;GNYww*i^9l-uOTg)A125*)Qq?EFaG3vR<;Hn)wDg)A zr#f|5;MI>*eqBOu$WIgDf-`P_we#59h`U!Li5XGZ_GKUG|0Sh(Q_+L&sO0rJZ+ zw(Xf$4v8!(Y{S<5^GynXGquMB|J0bH;0>R7K6Q4j;My8zT!T?D*UL>fOMAEF1@RvD z@G)X0(s>e|Zho@#fX`>%_dXqh{#{xwAwlTzY9H71FnD~))C-=!qrF$I#~P7APHfm3)<%DHl6w}?eyLkx3hD`jN9 z9O>e_GweUmq4VzXTgz|nS?X?G1F<>{I=(Te!~EvE)H%m?X$x4Ro>H${dvx=f5Y~0j zE`5Wsz67CE2$)c0n3lG^q2CxQ(0s${vBAT2Vp?|EYPdA_p5J?FmvbJfcAjvAdmM4T07_3KyZ*3QSEW>~GbM_H20(pk%E%>Hua@2}y?=|%^ ztNANUARGBiL1%~lv^7GrBBW?=+508usy7HMGNn;?bON*gY&+wXkwayPLBP{M;laM_ zHVIt2PznUhs%1x-y!aWqHCWGU2C4)7Ft~6uMT7*t8xn@{hmaLRWC>3qKA}`AV}f<< zfTL-yJ%=bn+>J9s9pK>q!O0ha@m&Ewn(kFr-*z5-QRL=MFcW|nxsQyE$|p3|URu0U zvluX|X!sQozZx2GZzCkh6L&TO)W5RYo1^q*?wjL#2~bVAH}Jy872wevo_-3gXb9PW z87`!h^MWaXHhODL>GQgSBDOioUpTS%<>oStb zU3uVzL@|C^ws@HcDEs-mEmBRr3Tmhbc3&EVso=A2B%-XRex4bWa>lQH+P^2e}j!odwdBT8r9AyR%sT2d%9;V~rg6@L~a$egO)<`|nds#B0Lsc~%?iaeYTl zlWw)gaC=?M?eVAczm)W{wPOH~=_a>u*B*488PszBdl289Pioe-zu(#2ujRjT{6pP0 slCywOXki3h^PH6-694DbloL0}u$$7|9jKUHyW@ehH4N0hsXh<;U%Kj2XaE2J literal 0 HcmV?d00001 diff --git a/priv/.BUILD_images/docker_exec.png 
b/priv/.BUILD_images/docker_exec.png new file mode 100644 index 0000000000000000000000000000000000000000..f6fab09d02b6ea518f42aa6b10ad2e61d826fc6d GIT binary patch literal 12758 zcmd6tcT|&Gx36)#H>ikgw*>(K8=@4W6j55NQ~?!`PIS{05Rf7@iHe8Ju!A|m^`e*Nv}_50{1B67jo_~; zH(%M68}a4Pqu=;Dr~be&L(Ds%SC8f8T+tEic%^xEpOf1G6sM`_lBHhXkRh+@t$BGo^}vQxNhuUOly_X%65*9;uFDVch?*8Gz#8Y7 z(?=O_0!%@WQpgNG+3V9mnDl#;oA}g-Qo7aqHVrBwmr75CxD7?B$8eBx<4-UIuRZmI(pjFMWv=;tEy-3vzv(u zopnVKMbxc_Cimto5k?tMm$)_;mrDMQ_4-(q4?4&3YN(06`n~FfW9%1>S`JkWVc2w< zw)+FuX1)FPoN=vBf01?Nu=~g_l}6GnByXG>{X=Kkh)|hErMnwq*EF;5FsF7w`aIJn z#hw%c_i%#T@HU?ks&v1!;#M|Okm7G)5rPLSlCt20ewV8`PMmZkyly!EMYca$PNj#Pm-oSy23xxo;m96tf=K$;POarRysVjK>7pgswhU>&+oWc~qx z4x$e-x?+)Ve$GZRIlLWdBHaWBGWy|M_PkXEcioC7o;q$|WnjX4gZE-*?+=YXW!X6y z6C8~tdzDWGtj%Nb8P=SjQ&Nyi3qmZAl1osWk zxBcH8lNp^q;}1JuLO0TsK-WC)KJ7t`kv`}4WM$nO>b)NP0H<+0c_4m|N>)Q%S;A+J zw=G+n6)72(uybFn>`fZA^7@O8nNqJM#=7V^CtGNTlc#1-dHk=e%`{^@l_Gzm+52ZQ z;2-oZ;pO{@y<#;5bCF?(|A_etm>8N6Kn}|1BGU*8hOpmz$r)~DWX^oew;7wr4;*Ug zp|_(-l)IDFS!kuU6`T}j?X%C<6&&2H$Htg#esXTsO~7WHfi4Xt0a;AOzS=NH?Y7Fu z3YvC2g5*YRUoabxLq21#eG^nZ1l@4~zVm%c^DCi?eMQ$pL0r1lI;yr0F&GcO1ieNC z8MBwFi0p!9jHjkQFW?Cd&7P%QLOOZGf;zTv=KfeoTP~OZwFwacRk}FG ziK>F+Ge^~s9BV=3TH}IWE-ps;7S4~x9N{A=>fVK-+O|gOrqc2Vox!c^j5Pyi znBNERdGcme1%G;!fiGT6n%no7FscE*dJg{6!|T0rr2db)N6d+dZvM&bpQ;f~7P`eI zhAaKeB?%Yoe;Xa8zDtT6mMWPxd73FLB}QZ}I-BXB9RrdKT(8^oZ@Jn-(-xxqA|x%@ z2f|kdi>M2ou=2Q=J&v54Yh5kxZiM{NIoAle^ypHH3>;gI0{VS|%Q&+R*cU%gDRZ12 zbNh7aJhYrO0%V@Qt2Ars-_@Mj*j7O;J%X%c&lWS_%24m~OCMOtwPNP_6!A02PuxbC z%UN*?+!2L?$bKGYzLbXb%?!%7S#U4wprf-$>{)ZrP8{4TpMCizvuy|-P}_sv;stAz zlZ@{3?~Ok!4Xct38qT{`Peu=We!(luP%B!5lLtUnc5v;#v!yVQoHcgik=!*6Nlt`~ z%Jg#5P0=mUx}|HqAA%D zzzxeAzRj(eL=M-7>g$)zlMA6rauHiw(60u_0duo<{4H)J4Xz3yk_We3W+(0n*y)FE z8%*T+DKOartVb(CY|+o1*9s24EBp1<*-BG?fb#=fhV&A zUB~t%&I6MAx?P~@x!;DPmGYIPL7ABPo*k3AgSx!h9=ND2t2D}o=x(S)Q(C>Aw#Y=b zco~?k9~sC7&d@8MSgGd`k8*Ml?{3uU&+(QFYY!B&T<1zLR*dc=2jy=gY*>!AQ=d&T 
zuCnHq)P#eN{Lz|Ro01GjZ7v0Vf>^HsPeFJO*?uxE>WtdE582Mt-p2KdvIu|Jse3C0g70(B(SihD z>c>Nl_*Ut@s2xibnrO@EA5)AsTdf5O4NgyM#PSqQWaKxwb^3jZ9Ms(C#bJhf1LA!= z=cMc)2(I=Bf55qZ3R5g=Di~#S$s#|88NdHAA`@r@%*!@L=0kGFFKRn}ms+nASwY#4 z(p0Q@%*zPmwSiiQW*U;H%cEwvvrK|oPaqJ>YqDM-YZqwA1`i9&kcLZY7x?n3M+~g; z-mTtEYywA~*JRv6m!zf?997EOHEr?+PqHsYY^X0L1k5HtqlJueab(LXv!4c?;bnBt zRO}7zH28Fv6)+2Og$*dp&tetj+b#O174*5_(P2P*fYo*Ia?+6K4;zRi?B&XChiwuDu?xssEm z^3Lfi>oU&|z4eND40;qx&nedaqSO^niHlzJGiKq`19=<8z7|AT@vERX@%MkJXv+?(^V_7x{A2d|1&0)@$(X`{@}odWJmYE_9Bkx-m#Yfqy?zOWqO0$527*rYj4vI}UrF%O zyq&_kDbK*<9ciaR7NuF#2WhVUG->;wA%G{^nIX0dl!y zvXQ`p=d|&c?3=T|##vz>adk@9{p>HU!fFOBW$yxLx8Ktfnp`-CctwtXRhPv%0mo6^!B9l%^?vtOU-zLD@${Kd}{|gUB!AX{%&mebY$ZbT`GNM^bG7h zJ*%WD}Y=2R&cJ>mx>YKS^R=KF~o*-kZM#+&VyXKmy6JTJl-GAWOsV1 zfj~U#(eBBGu$8N+Jb5<1=Rj%ka7PVZMhv979QuTPDjG;EB{uYF{}fx)TH z(5a75Uq0!2-D%*AU_L2mkmmB&gB5XK`t}BXcvg_$;bbZ(kjB5;ItzH&Efhw}e8v zwKrA744MsOtr{gGDU#j-b3B59MSg383(i$3Aq}JjJe&e$QgkH+Bo!JZl|#;`HXc4u zk0OhxzMoVTwBFvj1QZcznqR-q+1qrMZP;q>>x+IV-&e|fwZ%nCgDaOa;$L`nh)C>F z_^Z+k$V|6b*dmSfgS5gI&#%=z$yE@#R!#5$_1fG8X+v|puAYkNsVL9qX9xTY#bv(A z9Z=HvyBc;)Zn8|f>Q510qaaVR9>lhzesADSUXI+FM#oLT>lgO-k};Cn?i*c+BpSHm ziuks_cIEn!Jv+Bc4|nZU+b)Pmyb*Jt%Zz-7Dd&peDSpQxY}*H30+&mNUDiu`RG=Rk zhpEUl8tSiX$bb}1s&TR(hM`4~e>HK`D$vku^k*fpNaCr|gT@i#TXP|bQOp#>1sQy_ zCc9I%Kp|FhJj7mXJWqgFtDQ9hLQ){WeX%SQE>Cckak39(NIO@6VTPXZjsY5zz3J>V zca?pCBa$@Jd3j`P*d;KJM#h5-UTFx*M_^}V8RZs^+-#!uvNJBRgJ)ZWR)99XqgGWS z&GQ8bRB;`s2SHb$+?mJ1+3}e}a3cHAJzxpaU;&M$@;GzAxOcQt9jLI~-zztsA0Dww z$ggx6+l7@FA3>vP+&ksJz1m9%Eg$-kN|(EWeU`QqXeWVnPR7q^D!~Wk7-7@{orn$5 zy-y4Ax!N2-lWI*!+R8%?72n?O`wZ0AO%i*=ZFGstZO;sP*+wAs2|lq4<|x;KgPtzl zewEEKX>Qj1d-Fmh@*9G^V4^y{C!QfrfVwY_KHXC zSdnYQv}R#@6Ro2w(D>Ir<)R*2OU%N*vgRuc{rJ^?pN^cwzIij!>WF=KPI9Xlf2dk_ z&!d@;Nlchd`WKkV^$D0@eXTbj?1PRe&{Afm0tx7wFrjj`5IhbQSP5}Lv?puWr?1yC zqjDWsws#Q*vjmvbJwZ7Mo(PXTf#8al>g1H4jHDo&Aao=3OOMR7*D}kf5!m#&t-@Q< zO;mgsZ`TwwlpR1xWDzavKe2W9`|~qK;Y51096rMU)C7R$RDgnMy--Lq5=QMO#ef(r zQY&JN4^b{tNFzx3^G8v?tadw(KxuGw-X# 
zG7kc8tx|3W|7IgbQDqxz|7Gc}*q+nC`V?x^z-A^9{js0)9PUsmN=h4`VTb)H5|J9o zRu!MK6G7xpUVq*sd!Jd-E$_NRk=<~gMbKjw&^M~*<=zSGSqZIPMM&Q)?%RIN>}vpc zMxh>K9iJjNPO*aJt-rx$4NwV*6_F$E5{sEcTHQWNqHP`(K*a$EZIKmW{YHTcLeEBH z6u}m|*Sn^w``*>3PGhJ3It>-b+w}tphqx`w!$)=RSIGyXIcygT-oVE6CqmJY~w)=Q&-@_dp)x z#o+vL6F?7q!kFFCb%)isJqQHLYnE0(Xt|E^=Cp|?-!ttooTj8a@-eEQ^owUnI_?Mb zEP8WT4wn}DMFq-l>cP!>t?`534FIZ>qFN=r%WD!*HKiLXT`AcCV+9M1s z41%Xzrvg{GndhKfJ-nr<4R4n|@EQ_o3P)QY=)yDXCwF(>FJsXxHAG#+{W+kmY56L^ zM@OXP_|~z>W7^($o9pPHzm(oO2O3@UfA8jDqNEr$ZdGf}t<>$=x9Gb%qPSwsef+2i zz|*1{;lcXYx9i+IcU!(=d4qP|yT|(N`o#O+8p8eXlzmfBMck&5?IP>d?j_ zw?INS6*3Y1OtDb~Kb9yAWV3`UghUtlrIRWEQz6`E%;p=&h8y&d40AFz{1K zG2Rw(e97|)U)b08m$gJ4tP3lHq+>sno~?ii&0ya%d+lnOL|LL>I(?6=g#L)hCy-Up zkJ7)UXGgq8M_eP)Upb{$<8dLby*2#u^8L>H*Ug~!`OAjf&j?xWg$zFgH_^ryuG0se zzRb*=`tnw0eMRHgZ(E?gTHenn;>L1<8|KaTJUX*I?0&H}l$Zgx3}ablO3OBT7HOWX zg<7&gX?}YanK0~S1ti+wyEUP;oFpY&TRy*Cr{bURz8~elPJ^?UmQQ$n3s8UFmb1d? zRW`vKUx~A|Y$7wQmY}o0Cb=_|Xlqdb(SA>5>qOsTUnjz8+)*MyQrK&c6tYC|0DUP) zs6cIW(6m!@Q?qna%9C(|(WJ6PsVw21)&Q4TeqPfHphkO^8G3jOa*YjEhOTw$wF!5K zWRTD$zALf#_jRCYAR~2y=nrD=r1pL-`>>xp2<>ie_x(00>1RFA0>vhcKxJ#7Qs~Xi zJP#TsFDNaMEp_ih85_Z+q{1!lvn=C_1%_+Dk)*O-XJ{a2D>V@)AjT$w7!~K0p#!Vg zyannk3W{c&1T+fRE2v)NIztI2c)&nXxm%+EKeh(~(b5I(h3Nm zC(C7YVwOv0H{L>^*L%D_s-rk7!a{gh}>Z?}T-gN=uDY(8a6fW@%unoXMu?*#d4 z8bS8sVSd7M&=!$xGH=}SC|7le3Xpdcq&{AhK08E@n3Mkmg|h5SX_jTTpo8>7L*VWl zL9^?^=Ik`G>K)=4H!%In!O10(pOXQCbe+KHK73aozLcl}&IU}~wSoRMvLJ|OS#j9O zW!HC5M!a$r#@lEhP&;L7OfuvsSNO&4qp~jaJt2|#FD5Fn3D7+t946s4a{v7=*MOrS&oqgaQKjN=6itVZ!(`R3jd9NntHv&XSxct0EB5IcEfP={p8N?($kf52qbv zR1?%8jz7iWWfba^x zk>Pr&^<4@@TUz0~&6-0omvvkviu~?X02DHrzU}#6x2m56Kb?gckRAl=81R+tAGM;{2Dm8Jt)RrFl`V%hypyJ(YilYj8cJ4%zoJ zZ1jhP%jPkYcYM2Z)_PE4H`zPu0@yz=9qKkMb8K=-x^E>{)KQ1SKBA&*Ab#o?^stP^ zu3uAf%(xpYp$<% z>F~g7)@iUVz|^2(%HXvM@6M0X!lX#kt}0iO#m0JY3vYpZZGLG6YR7NsM;fCy>_!-! 
zIA?F~AXJ?)d2VdlVYZphqTvb~Pvl6(1gu^HOj82WK$Ry1 zamnCl7*wMae+Th^aZ(ED#CE2LPa`d>*nuMqm5x!0cpKyw;+vO24C4?x1wpD&KvGao zx?B7CONI4>Qn@7BTI7;Of$oKIX@DcVH$X-^eWZsoU*PmQe|M7R$HE~~bv3Y+oa zmbV9a%&wI9GY!+2L(w1$xO7_u_{6Psd=(97}h_6Q;j zk;|Fp!C+LgI0NNg!u-PtDO{t^PDeM6zHPc5QFp3>qGbPg#X&A8$9||lrZ@ak_uk|m zZ|QtvQ6(?O3fFpez~!~n({gV=?tBc`;pG+fZ5yUtLz3k@db*$MAi{9TL$q|KBJ_{j zZ#Vyksv4Lf4sB6&KGd)oH_@BGhE6?iV#3y)UUvKO{4r0Qv9*Z?i5hZOkwhw5c2zP? zPA=_LRgrJ7W6GE04sO|^rQm0maq!=^7`kP*`>j$U`s;;IBRX2%Zydwe|gc#rHm*=S%hP}c{ZPjH|OjS~7&f8u0u&H+{ZD4cm ze%_J4tQd0C){XbmOTC@rQ=~J!=G4`uSTPnK->i;?NaqZ>>M*7T^6&H}>b|(sd%N2T z`9?QkrS|XN8bY!|*q(1e64g{ovGGyV6xc$?{Y{MWAO#wXjZ7V|%hZClt=I}6htZge z+S5r19uZ$T=;<}f)WyU%FjGtHvFo=(udwbB{G4U)*cYMRM^b3%AUi&2)wyrn#3Y6z zicxStq){ao&e^jucC3ZzdiJ&+iSrX$`uhjiooXc)CX$E9TDq|R5d2_pnc48CrU6Ed zAydhQ{-#>VScZpg@Anvs>$&TUl~xT$4l@&5o(4)zq5`9th_Nh%@sxAROcfi z)#SjH+$&j%2ap|98Ubm4(bZ#QA^qI+ZFW?DCgVfV>4K4B_} zZ6lNoZ9xB6k$u(R>UEF7>$ao;ollH(_b&d~Np4*jLtWDR7Wp;F6IuCfVrD@~#RBL^ z;_a+LI9=$~w@657o?o>9c>)n;dH*I*{Vj%X2kTuaHqem!z1>IofP6Z?6O$uDIFM=} z8Nzp<5`eYiq!<7%y7)LTO@K`$(k=V7!?M`UHU`n$J1Y^T3i2|gWOK&`bq)E*+1hkC z9HLwv=uK^-HyiJuY^8#X`AQ#<*eJD>rLUTIkQLEse!;wlsFNI)HE^Fh!{5Hftc?2t zjeMbd8<{H;3!7(42&YN2jIn@Innw!QCn1x^uopBW=CY#{&lKt^;|Jh-4ng`n?o4~I z@X;;3tnFie>+WX5{St_b;cmfgDCeqQaiF|(eXNg;SNg9s} zc-F0%Y`N0xNZsS&^1JY3)dZHB+&^r=9P*$1<%|3m1StxCe^a1rN_IFm-}nb&Bl{fQ z0>Z9O3T8?4{-Ro8{hkjz>M9YD$J*h*Z)2o;+};1&p6N z_GSJG#xCX82YlW#Prw8E()l@ws1p&kZzbFY4kCr;wPmk5Gf;gakhN6rv>i|UW#)Lf0a9{iBAWDgtU#antQ|l||LmL&n7rRam$RN{$@*5ZC zcxUnn>giuNpDU5E^S@FkL`42PCKI-tNVw)u~OWv(rYvoWw zx!}@}f{>9h&br293eU~Fq{z#61^Tjm-9yPiAO3mh*|Fg(abbpi#_YbXM~T%7p=kM! 
z8!vON#(vISlTx@6aZ)B0v}Er%1dMmvc(7{s>es}Ih}=FWZ~o9^OQ@qh=Ehy|o2W?( z%d7-A71au^Tr>cgr!Owsu0LqP_F%|2Co^{>M}-2vOlluQ&eX7SM$me{OKn#&iu;dL zjQz-e__U+WPT?PI@B2I5^M&rGV34+C8q@j9P%M2@8t{)TH6|8c-+o?1;(_uu3W*3c z!T&Ef_5VoF#;!Zn{_mxktXF@gN0dj(s%Dsu;$=Pru!0UEA5XPy30KauwukQ(wL>qM zyVn1cP4Y0a=WQhE2t(K6B#Zvvl**tU8Do6qJ&?h9%5=E?8?y5}akT}7wA+lk_~&1L zrw<_n+$QI(0?6MNL&DC-v4v3k?B?Ht4(y#tD` z8Im_#4TSh~cd2JMxv~fLYl0uTbYqTVk3kz%y_In4%e@NHl!2F+eHjyrHjYF1)abVa z>J7IwPh5v7WbAsVl3vzDNcgdU&r<5qZF*pKf^SPzo}tFc#T@!t+Zk460OwdW-RJ=< z!H{}ZqVfNj&cXI=Y5)CLM94G@nDQwy^sA$RjzSgqRIj8N^uoUJGB%&v_tme}F2p-} z3A2O-CCKYasZ7cZ03S4S}qj<3g! zRc3*EPd*F~p0_{#BR8V(PO_9RYX;RV)Odmlw}#f*r&cU;{XlQN)?RD)B>;cwlzbJy zQtpqUtrb*4B9F-`4`q1X@i*pvSgYibjb^{F9PRlJf|STiUb4>$80vI*yfs#LCxZLv zW(YAQwCpi%VCldNon*1TX?JmsPO?>>S3bqd@PjX`{*xmCr1k%g9Etq1!W@YY2kic5 zjzl_LcMC7nu~&Iz7EcB5i!th1HiWkze|G7-gM$W^q0#HiXLK#yfjV!eM_hkRTk~6# zk(WaXX;LZbEL(R2ReR}{0tmgUZV2{{l-LgCu=fcqc`>@NNXX2Nml7K%v9ePZ}c`Fy$( z9g|Z;aoY7?ak&G@g!yt~{vDT<AWO*9%;$uq&?cgWc?v2!4YF@US?RS!zUVY8y-Dn z4@}>|KCdlVGXk_8hi3raF2o_?;JOLAW+~&E|Ag5=OC1-zxkUd|B=7u`4(czDYK7-3 zi5`MmfV{Bc&Fwh**!XT8;Ei+oPx@Zs@3XSqBNO4pg^6hjXKLO46=+8}4ESUV1MR|@ z^cVC|-*(ny8gA z%@9}^`9SUJo8>Z&APx9!_(Q3dWyzfkROJJ0@2T26^q2LtT%6NP_hHICi1@Z>9649_E>uP9Ad*OqVGpp9Ai$NYCN>5u@k$Jy4fWH^?yc?F zqIuSf)=Fk+Njg_)kUGX3(laW^k6N50h%IhK#;OP^PGeVKseV*-Mr(QG9#$b{%K|v1VS&$S@Nr+amnS`GSfQKKsm{=BV=z;%(&3HvIZ^p>y}2boK3TiMoaS`eAY} ztQQE%;#w02X>x+YbEVMwk`T2F%H&OrACIX^kG8#sCRyL0_ub>Hu_#rck<%z*D_|RV zqM3SVq+-;iOSd$CCh0AohpK|P^y9A^)s2(B!hKux)OT8sshAW3+8z-@c~K2PpoTfg z6B&NU?x;JB`u^Jt-w~<$E&ec;0=XcR=)^E35(4*cgx($T_l!Ig#{*e>-?x;o#1|J;%`sN&umc7xgc`A~Rf9GSm-0k~ zvOTsi~;3#j(IeX7=dK)txY>0>A;|E5BgG@vHA=P~WUlO8h$$yos;VJ59M5#>lH4A>_%1hd7QG=ec12?OZglzad zN*du``gyM~WQ+fv$5UzkzVkxKNtZeZv&*f=)X!v$apjGc_rfY((fim5?LVn?YTkXd zrUSM2gL;AcRI+A=$FUkN*4+86Yz~v}FQtFP9o@5%`MmU{->X=)>^pPWgLn4|;ZO5^ z5%pr($v;|DQ?AzPrAaA`-Oo5D^vBB_4%@t_Z*KT*4QICg4N+rEHC!XE*2ohD74w)NoWE@P6>QS{*~@- zlpnmga|A@cu=5P%uk4_eTaNN_4OMHxUHl=i^fWu|lnHf9DCJN!T-dk(V)o$NZb=hl 
z9N%OqE>Cf5Z(8WnyPFwB+Y{IXbg+bg)%hGaaIUfOx?nCrKJ%y4!*ykz_nqtZyihB% zs_pOD-D-ZcztNSkrAR6*s{c&Kze1OEJg$|`-TUj~ju4;Yq7HNoYlsH0H&@k>bvgqG iFdW@}Ql#@W7icqqJl=V=>-4WXCO0i^RA0OQ;(r0Hq^#5c literal 0 HcmV?d00001 diff --git a/priv/.BUILD_images/git_clone.png b/priv/.BUILD_images/git_clone.png new file mode 100644 index 0000000000000000000000000000000000000000..d0b2f924f7cc98ee96907e83523d0e1049d108b6 GIT binary patch literal 21588 zcmZs?cQjmI+{Q};LG%!WAQ8OEMlHllYi z>hwGLz3+S1{o}5MDa#qoKKty?_j#W0j?vdurzB?~Cm7^0@L1O|1+Ljpqn#@bX!k2)6s{h|N;gDyg0|9~2 zzNV^@@w->YUGi>LBUh&&S51ZR+dL2W(nWore(H(2DHx%o*Rj(NsTc3(8oeDvOyZ

    {Y61$cAVVVX@Lb z%C=AE#{y(6RvYuBm!4hQLdg#Q9#hoWx}#*j=-qzNPox4G_y_#g!}BtZ-i)mR9K#SC zDJ(2}0ShK#&&S2q4hq*QeLWnHv3LG|i>YIVwP?)$dsE z0<`;KAO)8j;-3PogU_^r*+Ho1I=FmEYANuA{X4`+i{~3G^E+@*<(Ypux#8F8vXbMx z#Tz*F*#W>W)Pm7W#B(4`Q#{Qc6iF_YJ9iK)b^b*z0ij!xG`-Iooc6-(9NWIj!&}47 zL*3iqJ2u5nEty&nNBy|*ayc0rc}uxY_UtcP$9os512?FAiVx~su5~;i#tNg;gteqi zs*kk_Qr{^@Q@<<$W3V+tNlHVzJOrua;tJQykkBo3;qEl~xD@TE0;~9TEpEl(1N!LXUz3W1d|euUQ?#$j^T9IteJVu37d) zQsJzvx=cS`8~`SNT>Y+n($nt8DH(s!wf&8Mp8E6GLJa^wMG0f$1&{~Q0>=DC3z*_2 zTj+S?l7fsTRyILwa>GAC0m_{~+6Ty{KggWbKYX6cNN8juS4%wK>Cjl@2(dWi$JSV) zc0xbgNVx?Ifimny3}&mFB~UM5rt*@1PG2` z;gvB#Tp7N80R>;|0(6Ed(3r+Y^MFs)vTd>~MiuYUWnL&QfP5UKY41lH=Ys^dwOT2b za6Wo-7{o}IH01Z|w5_p>j!(%hW3`@LW1}FxF4K2kuqD&o50_1U>>NzE;B>3c8_>H4 zJRryugl7wACZZBLW7^GXxBqi@IgsfExbG6Ukbu3zq5aus5Dd~VRKUQ2@MzdE%w*qR zE()Ib1!C!(_fR+{FDg0d9oPi$6fsgJP>fL##;zdsVIC6(aPoMHP&hRj2844Q>LmI& zFyHz3xx}Qb^j>QM=9B(%8jUAx1wF&Rni4t-SQ>g0{dV)v&r9O2A9k`JJuR)b(XMdA zgMv)Q`g)_8-$8aYbI4}Syh3v^WGJ`ZOC`2%r~U>iWWyD<{J0VPB8?kr;(LxVx9Ya= zey>TMR1uD1OdOLRQ-*uTa61EsJ@&GAQq2sfxOP*JpV)}tZ;NsH+Seo5qFuZ6G<8Oe zmJ6%udNR$V&$%RYCdVkZ?V&MB#g>K78eo``3rqdvU5GYpc-q=DWZ1{Kz&R<|4V0AlxV`?!l z>a_E(U>NZt+!F>c1C(6`w3`8ywlT2z?G)1wRhZc{)4~4R+hdQ?6(|rfbDn(Ln?_-! 
zA&kT9Lg|0qQw>Hq4~9QcKifxE><;G<$Tyl@WI(3cAw7L>CY^lH1LnrN;&WJ)Ex;38 z!=lxxaNOsQI@;t*6&Yyf4GfPwuK6YPXOK5$^N*Kyqg$3*Yq~StjD#Q;O(jEPpbt-7 zX^-P{%1T_d-UY_Pc%!y$kbsM4ib<>gvR2&;qU-$E@-tPgO$y1Wt3LRfpyd8d4mqE?UPw3aMvhQ-!PNXAKq1{R`sx7k%|^ofsC(hFjw zYfs_?{G=UfLVeHmP#5br3^Ra-G_9)fdXpMxRRN*jG!2Oo+!DyTUpBt`7NXwM0h7+_ z5iTO{03T+(9xO?Mwfl&Qj>vREDxAk%<;eA|6Q|z_wMM6U#Sj5w#>xcT0j0H`u*mY2 z^>dLE&EH>B<8J>!a_VZ>*XOPu##?QYy&EI6jHorV6?jWt##F3wpi6$p04EYC!s@c{ zqbOLyTkyq5ev98)*S=K)hH@rkW=P)lCGie=xXdh;9JpwWOx`<9M za7js;T-$8oHp1nw=X9qf7v-~;G(c)FE&MGmcRNbOj?71O(+S}qVO(aw(--yAH*#E~ zb|9fJmFt9Wc;4`7i7-L!kV8atMhL5jWi?}I^V{prIof&=r&*9%trw0;qIp`8{_iDL zBL9-lSRXf&j*jg1ylVL2{l469bnNnhp{k#cLfkj!ELDWYo%rY$Qlc?g<|^D9Q?{GB(*9GwK9F%s3q0PgxpD zYUYvwJ|>a}XxZXKm#u1BD@aEnI@krUMLFvGd}TBTtozv?M8%*cO{V6@PVnrcZ~`EsbTU*y29o$BMyRyCA(~>!9K#h7S=dL}MH( zxz7n2xrokSABy-;d$Og?7=ET{*8<80!ZZ;_LJ-wDEM>iE8t!wKJmAN9%DoF}nb*Q2 zPnjnsb4mrJKy)8P-(lqu8JpsQg2$mzcJH@&l&bzsc;V0&n+p=WtWVY-6pmL6s&a#v z>qCfr_G6#9Z2XvyMe24%aCx9_rB$}r2!pMXt|R(QpZzu>%IB_8r=;h$?!Mm{6scuB zZCk^_<+7eclml(u zpb=$-{_8;-0wd2UoeCizc$%gYVt*tRC3W88g&O2Fds=bl%H{#7I94I_IFUJ)Pi{7F z*_Uo}Q|vu8k@)V`mO#n`d?tnO+H)uT&(-Ir_sNs}ywdB_9-Kd@eNix}KA$O52zL{O zE1C5_>JgKJnE##)mY6H`<-DH5VrE~}&(xoP zm{7_hqsE|Fh2YUR`EHVkxoB=*CRA}Xf3##J0n=ScBYhqM^bx)@#Vp_8H%5v zEC?&!*j3H_ciPAY^&EzNyZbq)e3ZyK-zT-YKtW@hWN0kMDOkO$^(8g2U#9}sHmUm7 zrX0wtxihql=ieC$Qn{wz&68LrVP;;V^lQCZj$OQXbZ`0B)5dTZqW# zPlj_uGkz=y^U>@gH;oM@peD`khM9E=tF51y0BYsd0B(2XZ>xI-sVBsvp)%=^?~M%7 zL+#lK6;Ak1VZ2|)(Phb))9`3`c#v(ed<6HSRr%LekTU&&pgmg_FBE6I?BPp~z9Kx> z-GNNci}P4pMy?x`wZf|67&!1(cgLkzNaOKY|AU*wkbn@aH(-&Ar@vWZKCi~KHN5hM zEeG*g5Z`e3nyi6H~um7&(Wr)uZFUQ9$q&#>n$L&6z)V=A?^zwozb2H zad*$}8IULEUSRq!|7LdvdvU4w!+iwj0rRAW6IabAPnZn{C=mt|eyQ<%itu-5y`Qaj z6SYr=S5k4kdpw|-Qm#yiR`z~Tl|IH&$>9GC=8 z1a`)I4xC}F)1XL&6L~|#VFCzHrzUKT_3_#WeEummFS_GsC#!<}eOrs}xg?OG{o?D@ z$e8+Np2Mo%j{(lK^Nh?gj`X#V?mW`(YZ#&I5Cr8PWbL6k={v zb}0H7Mar98pB}0{?`Z=ZZ@p1P1w>6>9V#f4?HhOZoL4u0Tv0=TGR`Kyz{W96`*)i% z?+Ivp9E&1oFsTvmIHIrmh0s;Y>pr&=R0w8ci(J^%8{2RS*`-hhwKc8@Uk}>7B-KgV 
zWwB`N6%9jU+%QLZQuN-g)MD&|8#@FLsp~-QmRe`T6GWT=EP3r=*FZe5x?O%Lh6^@F65h2ZyXs>9d}2tg3LH zE#qf9VomrTVX?x;=10kkYxSP3jnGH)nA20svmuAuff+v&E_>s#g^`3m^1Yrq(7 zOMx0K*Qt|OeCdt&dYQGX+;-or;AEg2*22)}$G^+GU5`X1FTU&Is)MFk5NPcKp;xB^ z)sBrZA@})*{|N~=h*BW_2|VM7z07ar*3v_hukkUK7AD3y%wI9z@ArSwGx)lFD1~J* z+_bL?)jp64ggnZa5RH3r#`~U%+pKIb$gaM-)N`UMAo*F1&}{e-S&uiCHKxG1%v08i z@$|THvx~7hV;b#GT%`NE^ifTzZG0iYn+A=(H9APvM`-Xpqyz`a*^5boJ1Y}j?!#oFmGmoiqs{2s`lCJJ%X2q#0NZ@pU8E4QcUUqhA(95}O9yhK<#KWY{-H?MWt zG8$Fy&lr{NMRscjjs5jvT}#f&LO)p&A)A{#>AFGf>E1c$e*hIuLa$hu)^}row4P9x z1U>u!q7X7sUqDfTu6q`@ceYF#-*gc^%3}}!5y^`lH1n@!zn6?}p6fyCR(Payqyj9@ ztWL6m69$&lZ-~;wSJAyv{k6~;@v#D1U2~L&iF;h*4J#i+xT_9w>}B~4@=C#HeGb#s zm!{1#FtWBLZkC3Vg%nV0#)zhur*G>Og=k%bf^>hRUzqvEEP(O{8M&?TI^|p#qR(+U zupRxxUv;qBv8pehO`vdKpi zrPFtE(l7fAm-gG8!*$)I_isK29lntb{6}%dEcg?_ejUn5&K4zZHr-JqlwnZa)gZLz zAK}0Bk<*>u1b(ZUaP(fIgA4t~86zss|E=NPHT+-Y7RkW%Kd4HmsOBJ!jA2~Q(*Uwe z`7m7^_IwVhhDt^DE4-rl&TrD%e$irlcI8ER6?EB2n^K@2F>~E_a*Jxa+wM1`1px(9;#=S_rV^X+9*^+?tlN z(IwO)rpU`d2F4nz%j|VuJ6c$MBfA0;w0c6%KG-r+4n@4T$$q_&@hjg#(n$gR%QM7SQYvq5;z}kD+Lzdtu+lLW*j5*JlGfBAR zG9Q_mds7(AkZ)(FZtnX8KW?b~Q@4kPeV>#vXg;o`gFM^Yg8VJHLhaVS|NNL`y@W?} zIk2*oV_aMhaL3ytuf_e~dB&R))Fo52$U*ZUqk3nk+eJV!H#(+Gy3_|t*R&-u+8{v!G0b%-EE5JP%XC=ftl{b_%&$tQY$uJfanbQb42e1Vb2Vl1pK9^ z-L8jb!KJpTGsFiop~2fyF6~$T1+=43?41bUO(X1nL~wTJukFzU5V#sku}ppMvq&fL z&~w^i@?>g~Bq+pWzHGG5!9O(z=DE%BdfNLpktIJHQJ!?kh)SrR{#oSIQ)pcEf`5Y= z^V{CTN~RTuj}#%hW3e>Rt#brny}+n|DX1rXO(Tny=FVbQ`_?aD~T?NU@D2PtVQqzv|?kB0=#^wip8F8TO#mCGv@cxc-iW7DHoVNPDe&^wH%!yEKnOVMr5i;xCm{7& zs0wiVJwtj#(92N@-Y>8%OpKT8Qm%rh3Rwrkg1!xl_ldFftWQxS%5=WWc5fMa1bh5y znHz51rr3e>RUTcgY7(vFWrm0yn~``Li0M%{-E+G=(<~QpeU%PX`#?BNR=!`GpR%7R zs7oUpLDKJef%D{k)Jnw=u1{y;;7(MA5a1YUpCZ?@?bCfvlEJx`1108Iz^d5@s8xnP z*ASHF0dPEqb|u37PxH1Y-Abeg%a>r21zl>%0-*i#iV=xJvny-Pr6HZKK(cke)!^Z&NZ>hOqBlYS-Y|^_lY!E*5ASM7#-@zb6ehibK!}%8m`_{ z4fwdRQOv@w1Bbma4N+j#S31anGKVu$7IwxzlCIK5fD?^Y?+BKLz z0!sAe!`HjTlhqhzgo_Z=cH>f<4$#@|d7NUXTNg3|2FKeTNZlQ+!&N86ihcWVoO2;H 
z@}leVWyUVRSor}Wpy>G$qoDbW*mlOIO8>&djX9!QvkG9%9^Vfv7k>bu!{G6o*k|Ct z1ukZ=?IM<9bYu4b5UhGTWOtN`yFSy{o_^Z15;sPzeZYV;8?(bicFF!&yRI^CT@#g& zZ$is_ojS#Z8P#Ahc#kAdgF^Wf{WI$SWQ}aXOA3%|S0gcSr91HN0odx#XxZ*og%!AL z!(EV8=)h@H1Nc)XJ7X=u3gI&;A^-~Txm><)*dttcyXrodFt{|>Ejr-as#a`MSC?B| z`u5}qgnawF>=fnmsD%Y=_ywC%(fHw|8HRCrlL^AK)7abVnsrOGw^GW>U47+G&!|P0 z1^pV`XqkB2UDlZ*ebv@+o5rMQjU_l?mVu|WuKH`{C=0(Ri4svAbswt3cE8t27AVKx za&+PMu3$okR3S+h5P?q#tf+O-yEC+=iA+&hp6flrlayl1+W6qj*KIH88~;|j>I5*P zTMh3~l(TgX4E_^pcQ<<({}*`F{|@`7(w^gGmf*$VlyAU9%&3|8jRzBh#5c8EfpYm? zn;QHvas5_&8kvhLcjlUi>gmj=@{vK6ojP&RH83@s?(HUpL-kjYdEP(p)_{a9Me@%a zSr*wkH42kNr>7AHZ&-^kw~AyR%IKB`sw>RNnD|Qs_&ak6^y5Ew^~*gj`X?)|z95Ne zmO7>~JYGe&hvMoid%^OO>N}8tN#RDbwt2>nfjj%8oOQJ`HW(AH3k1W=(jzh8^Kz@U zY)9Xgup^<5#YoMJdVf@N4G7i!@=&D?-Qx-JPkR&h48p$js0o^qEM)Z)=CnzNfIc|Ilh9Xl}v|NU`CH}<5r(th!iv@YhEvP@66ICnUKuZrCDXxIngrQ3N@(uSbCpiXPGltBWQdM{j2{Lh8RLNcFN zm~6GfAusmm+$oGdV+87aAH{D0aNO4ZM$LVOCl)jfs%{>|cFA zy!yb=Xp&!UKw5}BYE0fgyA>V({Qi(m-x9nG64!Co{~T|;j{aKfqW?Q*?yPlix^oSz zMjvGVCYvaVq>Cif`M$YO@ZeKRGsX~p{uh&jw~z)65`WY&K-FK|k>X(5`X^qXb#~ON zbJeOyp^ZnJ>m>~lqN8_aKQjn(=i%duh{`P_wI+GK$bP$;u^N$Ch9}`7+MNA)YgA9J zQ|WqTLZIQLk!80ts#0?$KT*noX3{n$qW~*ka&eF)+qNUM(8o}wuONBcKwFIc^~nv}qfWb0T%Qu$*VQ-wW12KXc_BC!kz9Iuu6-7g`1&;=uMnsvlwiXk3%0=` zm>ooxDk7IHvyD2K=`tkI-VgqNU532 z)DX5iDc3btAxP4$A)Hw%x=$6bqS?5SYaevhWFsh&{FL9lf3<9-WW|Z$}QUY5bNbr zHw{bpG1+TRRrqlo)&ew}Re-FQ5v(e-LG6nK-Jtix_y5frp>>;YIuM(uZmN1it~k>> z%Z)*%JrNO_oh9NIk0M*y0POfgmH0VNtS!@PjVQ|vF}!pMoi%;@A6l2!)6(X|5M{e@BdFTP``Tp&uP2c z|E?hdzyAHVypm6sC__)=&wcZKDnrsp%1!D^&n+Ye7As1F)hSX*iCTIv<$@~Co03Bg zTS|dBgl{TH*52oS2M>yl3uXxryWA*!d%|?@r>g2g%{hF18?yC01mi+1S&Zfz&4T`~WgM`1Y;bqiUzav07~-$dm7Q@Dx7u*MH_GPzt>j8SJD?9WU2T(8iVqox{-maQstQzp@|2J<_Ig?^^iwZVuH&=< zXg05vbe;QAsD3ScAJ=;mSOk-Sd(iVzJUvEHhd+k&Bq@Qw%J6~%$dk?ccn|b_)9o&W z(ijp>x`tR(#MG_UG-9>4Z{7z?RRfq`uDluvWg1fo-lbGlt|WB-9=lUZk{I;ihWZ=b z?D8G%VIJ^djacY5?OK91~AIqC@2cRM3wRV+ON%gk~ 
zTD1>Lf79Y)H|5&RD~tbVe^6AfR>uvO0mGefm*~39d6v0eP{8V~#VD#MFT2(QB5`d{gsyk8jF{YF$nJ|sKO8Of9yhjf@>#BVWbq){izBgR@vC97EbG;QCx zv5i|4`Yg^iGq8`1Huvw{$7eGTzvv9c^k8qSA@!TK zaq?D&Tkmg0kSRipr<(XFjH0?HPqkrafOP7@{pP+WPgIf$1lh{1vF@m8Xz6;b{q(-Z zW2WO8vr``XGYfu&297qMNV1h|5jt8!^EtHk;L9=4y-`-eRC=h-fbXdUV~7Rqf=R9t)Q|TAF?_IuP_!LWIlZH{fy>KchQx1OMRk7_*EpPo zD|-LDC&@}Sw+<~RC~1ti+{?iQ_NwmVDwf_*AS2&rzC}mh>$tYZb{6)C18GoK$4WxMv@6j08JPzCjOL%S;0Vr!EY7xO++9M;~ei4qBLA!sJ~ z*tXw!f%wb7LELjrIUI=+U}r(qKD((Mp8SoTQFH-1H<;MT&}WFD=ferP!P04#kxiRh zc|{EvcZZ-x5v{t=%k=d>((O+SUO^7PFFQu{NV3tb9H4ZuNVDtumzb^TFPKup3-y9p zsN`~$G4-9B2j^zVSew4kxOj&($Txs)GzwUF*Y=QywXKoBIq+VX2<#$cXdfDh{+=2S zVmqG{9XIC9eZM3eV5bPYOlbG3X&zs% zSQJhqvQfZ|Rw(XLdv4ukP4fLQR?qVWnZw%l(>HcZk3OTT%b_lO`Sk13qhC~C{s9j4 zAK*YlB3}T$BN?UYFelQU1dDJNYrA! zvYrK>9jeQ50TI=s(G;E;fu4P3Y&|F#^abMF9ZhGLIIB!L>DKD62FH-T()tDeyep@> z7Tki@@?u8cecKpteHPH%n;Q0k>=*m+wBM_) z>m+sA5MT^H>|i*$Y><`?4>J$41hH{DLLMUwjC?aw)|Z{Xt4qw0oetlBlJxbxAGrbD zKv0o=_Hb?eFku#wQqeS?DTwuwNs0B+em>h5NZyNvuu)MhYvg#JyQ z9XfFD4ibE!KRR&@q63n?4RegL&6jmonqC(Y0a&8({2Z7eB;7Fh&5l5*N?*{YV3fNK zgwn461E}CXfEJ}%hwhd7)7T2H{z2>J4!)|GETsAt!e~&V(HpEiI`iPPTQ`%||9HJC zY1n{9I@PcfNY?B*iGo_1L?^)sgDS!&6!dkezOrAC%E197 zrvP<3l>0w;8oB;2Jhkd{7B$Q`Es;t9nXI3hq&e1Y7m|~BnvV-X%z1iJ zjl;rG*`f8wj2ncC!V$c5CQs72Gpb+`vWM;YsX~ssHpQ4uLb&JGa!yMfC>Eh+4tN3O z{Ndx%OAvR#&*u_jb#ozw#({^ z3wLdrQQFBs>aH_8O`y&DGFb6{?%;yaEWN$Txgs{zf4 zCo}w39+Brsco-$)j1kXT)Huzt3YXLo-YtSY`^-nq=Wdf8SW4rKAOpKO%}VWkP2s%Ch>d)h>E-N|P>T)QLzot> zf=!$7BWJB#BQIjezFj`%!^eL8>jjN%dyP_b((|Ew#3KG5v(Vod<9*=rvydV78>SEs z(`B0+SzL@~SJN-lCLt*$$R>Ev58_^iz(CZkvb0nWF;P*_n4@ z49huwEi{ODo(8ts{cCr1@%s8IY0z~JG&1mVxomRil>%kr+FIzt6%e9r(X+OzTP7T>dj+7apG|MV`AUwJuK$9*LXY0Sv-nU{H*p^VzBX&1EN~LTTmSKv_1h15Lu+K&doHjEOBT5+!oloBa z&A4EAu}L8Wd)GhVJ$(Y`)(H;zpO7k#EK_mrfw}b>Rd(S{)$auSuF$++M!pzr?4;J{^FvgRc+JlK zf9U%E(`H}8@ssSGToh<>uBU=Gz?5dq*v&hCYO@}6$z2+%yO2`uh?>Mte}V)c`0Q;t zdD7LRu5hOX<&hLX3D+~TD! 
znAuK_CXqBP=Cbj0@q*0UqquI~*r{G|yq+wf5(;VY+Ev?LBAxZAs2j9UhI{k|dn0Fs zKCx+xiIA8+3D}W?*N(Ryr=iu-KrRM7?XTaZN7@SQGb!sY-ao7ySQ2Hc9v5~g&9HMsxBFwA@8%)P-V*%tjyb89$k!6)wsj%Ke?grpK|`qJGk~k zI6c{35?R1*-@xPAoZb{#2lBsrtY8`SAr0jE{aX3RruW7QH&SBUUGW(D0_!gDV&+z+ z5H#1}@bm@_vUg}-8ExCcxP8R;Z=Mz?Ai<~?p(A5XW~}}cdD>ALVd*}^*E`t2oLlgR7XoCf8iOH6mY#cGC%%p-K&mb#0kVTI z=mB)>RJRI7oglT&4vW#I8|4=j)=B4HYK>vkg~-X@b1Zs-jw#&25|{RA-W-c;dRSZ} zCH!tJSWu|d_ipXrHIoUt#N9PS3?ht?*KO_s+9%ldD^E3*B&Bz0PXriCmbmFpO)c{R zo7>>p^h@43=LLpvd3i+FN_8z$H?b%X%*)EH(S+5#k$Cs)KCi`_*8aZ^$P5_PV>kK* z&M)aHL!NZ5$95BID|~0QJ7^a~Y1h*82EC+CFw?X0`_5>)9Ni&}P7T!sXZ-Sa+0Tl9 z4Gji1$s={S%a4N#LLlMrns1q>gUdHw2>D$x?YkapY-bOpA3}D*pP(*ugzA)6s7XbF##co4$2ud_RaryG{Zm-e-23 zV~O|_6eWC4tuZ0OK<^#?0j(JL^N+#+*`y8IsYw5em0+GHZj=qGN{YPX#);U-s2@O>vskN{ z^cPesN85pft+w5D?uU@Ov*5<7B>HBHu3t~g-qAjBsZH7b66SIVxwY*ZLAd-iF#g5q1+z;`SmJK3U&JE>D-mVBzh-=OpxcpNq4T#~T!(vo` z?eUi5o-_aU+gn&6Qr&>&t# z6f(R5r92CD%6NbZHp)n_T@qPRAauaI2(fYa?ZoW%rVv@1rf4bu2;Rv|Ah!Z@!BI)W zwNsXfNiO-vKayumFGa_jJ0N}<@WaHDKu*&l0+71nt*f$TF_Mr3miQk+Eg_7h5dS=g zvNWdY)_h5Dqs-q2#eC$!F-uYk09RK(HH``({@m@<@^P;^ZYHU&bOm;9{p@u)RFvvo z9)#EEirn#0H&s`J+>64W!|SSf0TkV~rXhmvU#t&n^?|+__erQTBlmGF79CLszp3uz zv_qJ+?KYpy!CDoEE)GHIbJ$mUh*_O)7{lWk#RbnF1;qaMmPZYAtNObM66-<9k@sh@ z`ptxW zz|=$&0Y+dBTf0kI4{=Ssr z*A`W*IdiWR-WQ~jUup*cTKa;c+M!%$XmSi2ZASpJ=r<7B8SXwKj;k!us?^NEwF%a( z2a251@;-1*6^Y7djZI<*d~VYE&@{LT{_H$@E)}RQbiz)=hQmA4A51}Fl&@`X`9tsu zL_DzOT;CEJBgVBXqFxuOb+8Z?{3U}!Ig(<_EW0i4USswv4Ey47q)#aGuPS*?*k|m_ z2DbM~&p;oli&a9&T3I2U`IG^SnGizEkh?R9uun%}5pV$tjasHfv5iTJo$oC$qu7BH z5FQt=*X%I9@`f-M#1R#~y=8|S!eM9SLsZMsmJvjKS4YYmJfouyp1U2_TB)7N5XJXI zuLLe;X7cnDb!VSm^1}k0^LtygjU5U+jafZfDBuFlU>yR0SM z&;mw&P+5G=JNnl*P40|L$Lss~9LseRY+lgzl%ltaQ4beXqpn!s?pYPXo$(O84}T7D zy(p1*H5(Z*^5i$>;v?%DqZZN+!P62xjHcLBJA5y>c}MKyXd?%+YK=eK@9a?vU>-63 z{h-Ze)7(n~0Khn*9j*OIY`07fT2_6IA(Ss+KJs2WhjO)0d1;+BGm;Rc;WrFM1=-Ts z1b&&mU>H?lN((@t4!~vP+nt|I_pt?fA{>tj3WHG-ZP-XmM&KP*3OXt$G4({xIPYrb zzWynx#iP`Sxd|r!ua^;BKf@WwC_^^<7oB5zDbCg%N)N-(+tX|lr$1bd{5S4Ob7HcQ 
z;q~T!PPq-+*kc}~ZYPk8ww8u;x$KO||LjCmy~}|=uIgkzIy2Y;xUhjnOF}vJ>hg86b@~beexjOdmsGSW@`V8I-124;bziJIDCO~ z52E!(9L;r$*Wq{BI6jQ}C~;6S+GWm5^R<*j@Ixmy9-vqXKCNj8=}oG>r3bvmrlY%9 zdm2{l>K}z3gF-7hqPs_1ozbvpQ>CkbquuEIRdnbAQp!IvVS(sY#4*@cJ&eJAgDlE>}27BAG8QLt%VIQUTA{=xmrkd^E_>WOaa>X)SwHsv~S{%%`_qXK}!&p=1d22YK^4@LdBeuUK8Gkkkb# z@>dV-VaC891zTQze!%%sp5n}AK>~ZM-O?5k-$3@ME#%4_G*WW_Sb>ZU?t-PywI8R( z;6j4IhA#VvZ#LL$l}F@sEc>r&QY7ndYcZS{*SJ!et3PNZ{*%eQE1&CY+w2hVAr;j{ z)_i^bJ#1ZqM1oFZ`fGiyx$_9jP59dDM^{kyrwaRl^7bX1aI1qcR-?aSj?gs35hy`J z6HFX~n$cRMkHAN&OV#N3yFt$HKgkN3K18;f|9R9_@Dgta_&2Gfq&LNW6BPU(yS(P; zNnr@thwU3(tLcEMHZy(jcH?zvVZqFy1L)QxW8LgYot=e-l zG3=Z^2HbH!=gu8jdNB+j=ar(C0;;9{HRC1wA#{eVAm=>p!Fa15So>#v7Q#)}+p@An znAg{@cHNmW1d5e4Jw9aDD48#yr3GVFT^(#Da0;KPxc4%pm(?%7m5|ljdesOz{L}kp zx2g;7lRsxJZj4ZkE=v0w^UB*fw@v=PpcdF1u@A(3a9CCm@R?ccTPY(WCc3|Mx{col zTbw$J->mcAR8*g?Hi%z+#8~=cc^tX1PGqLMq9eQlT{?Xnsib-ZVCj6oeN_z4co}k@ z15tavcOdG=2ci3&@g>1jAShdZ&F^dGFckgLVQsZd}4^OtkEyN zsKwoCxRqRe%NJ+icFogVoJe%vl*wPcdbo@}^QP{pM*afuv&YY=cG>rQ)+93*W>sI% zYhOJvL>tz%d; zXBvNEHsz7Z#r!*Wu=BpeJlY(1wXBconIgvxot6CF88&=W(6yA2sfOV}o*EB1$w18+ zB@4RGmkG{sDzwJTX>S97dcr_$V;PA`jXz&S%%)rmh!Xl>72*5V_0v-}YCuk*BC%=` zV-|2Dz2j7_?q`Uya&s-l;_-;jEvxkI+3BQq`eJ+cJ9C+wqPgg<6`TIrd_nt%Y^ z!NI16Rm)vOVwH+cU4iXj#azYX)+}^y2M?*UxU>xf1c~SKdSI@W&0wyHAahY`ZW##O zk;g?nXb#U!j4E?O#5J@8IzmfLd|f{!U(VQ4P-#`NWbk@6S00WoiHt>h5-QxYK(H2v zKAWk*S>f`wZv;_=gqI6W;0F#^2vY<(oh*v6Mq_TP7=7zVUhRI@6MyOW%yA#62vHLh5j`KL&T>G5rNl2TLm-34Av}Ltm zShpZ$i|wt_F>ss^CRqM#8s^Z?q#a++3AIeRmX<$%Q2zJYKtr^vvlZ3lBle^E=RbGE zaMG+|Epnd94WA)&UsC^L->@jeHeQu^d*4hZn& z!qx5N=Z^v$Vaql>E#n~Z9PL(m8y_9Wy7l3;<3MxV{Quipv zL6)<3Dy+fU-QHd7zqZ`v*me&q*c~?uT_<&rW^dhse2G(x)RBmtcP~4^x@y$(1Zx0sTAscS%KG zgoh(HhMcySt9=O_7(5flp(C=(rWm!K2& zez8+~N*@cEZ(3>wi!I$%Y#Zz`K#j?B88IY1F6CVRnqch9udg_K5`N=MtSP|wny&BJX74~Qoh9Ru=NT^_ z4y_maBED}o!`LZLbxR;UUS;+D-N+_OwIHe92g~fe{}b>BiN=Fuadag$frtROu31~2 z&$Y-qg~I3mPZ#GM&gR>`@fxwWqP6#E%unYK)?`s#d8{v5FQg6?<0IiqVKo zsT861sug<{HDh}p?f3T{@Ar2c`6GWN*Kt43bDj5dU+4J=qyCV#d@2@IN+7Q+3HTEN 
z<}KpaG`EfoN;RB1lkLP!`6{{hnlCq94Os zb6TreaQ9k8)8q!VQX^X12;r}=+kmUyWBlNMFlx`*;gGqNc9M3~KI}-|UdZ9cU7}NW zK9^jX5k%vm?p2oFbH=it=E{-XJjm@yc0Bcx>$z7_`_nT4VzwW(a>MsCChHKTDz!f8 zYY08RaBsDXVEu&Cb}Fa9kHTV|!2V>ItcCaQ;Rv$y8_mKd^dJC;Rj?Xm$O=G|GEcj* z2Tkj#b?Pn*+4JV(v~}XBr%uGaiN__phJ~pdPLrlbwy3c8BFs~6GFRPFNfU|lXK=|< zev7BcQC2p%pk3(`|E|Jj6Mza-{e+?`3)Ny1-3+ZaXOTv`0kdrpwLsp)#q>NA$gDdU zf~Q)6A>DU^%8p?unK6nj73l)zuucMjw1@sV=tw_?RO>^B4EGHGZ$t68-@pSR<)j|k zlkYhkqWpLMo|=?0^W;3`lmIg*WasxGOw?J>uJ(lK4it#s%hG(?+FpPPK|P@CFrN!B zOYp$F8t%MHynDVa1?{@tR!UJ}$U*eU{Lt)`1;aKr#){#M{oRg^&P|Lpf>(d#TAU<2n_o z>DeGx2FG&MZSDyw#*49yfV3Y2`6N|vK%4pc)z224y0RoL?2E(>bB*CGyf;p$g?!(N zl&kc4bGS@U?7j=W>({qnbn~pt>oh8kdT^^5@YZqkytXT$FE`Y+?7X8F$FWLZRxYb!YvF_6XPX*H=pCV>6h*=YmX?pb1#W&} z;H~~lQ#(pgJ|QK0een~F2u{!ZG1@+(MM@uhdx4bTq14V3YTc_#!9^P=#r9RK$vKO< z?b);ChjQ2=nZ~`wMiE6C_blhog91R0>x{ggoh$=-+=;!^W60CZ_YdbGVIPgD&RsMx$}W|>TS9xPz--Dyxg%mS-P8h8uG(5>wP_wq-xNgh*btIMV!Qw>=?!d8wNM)qpnIp1WX0Xwf%{ zfdYtSr2iJ&AA0py%}#|6-r2DeuxNy+(JW`aJ9MT!wT`XWXSh8>ACnk-4*$RtVY@_x z;&e{6a%1a0?Vi7qux<6_tfQOA&2Wlrn;0hq{2(zf5cumv`~6Rl^56U%XAQ_xi+L|R zk=cvB^P$CBqB5@DCtr0NL=HUHCsR@YCXRPAN6q;xLBT>M!ZORd2M|4}T|G z#=h-yXcBr@CvYY;Pu6lYn+*S9K4NRCyTGWj$VrC3!bcACFu#AeA$aqMlx)4dQ>}b+ z@}`fOm!MLP8f6ZlAm1}QhLOx<;~wLdBw);+{8K`JkvCNwJ7=K1P6;n{sMw7?kJTEF zUh)uai(%RM2u&}Ek&H|9Msc^zmO8YwhIwcZnCHm3fVknCS&owd!*|Y3DM0%KizGR6 zUjCKcS=Z|OJM!P>Q*}w=R{YIqa+e}u8Fd8~yR>A$qI91c329nP*{fDV&JZA#(qqW| zUD-2$Gb5lk-pG&6K8{VvrVZOgyUZ)!Bdnr`jCifS^&O?Lf11wFG1VFh0A`>O{6H2j zcoi#?8v^KTs3F?{4}AJc4*6t_P3N*JE4jhV#Me-z!G*22o3l7SK1xrr|s(cFUS{KmW#FY*-%3*MTq3W7rCctVD z6w6W)0fR&B(Z5A_$H;1!3Wj;(^<*Z9%Uw9bLXKi@LzqJNWN29je>!r5$W{0W@FYEh zIAXeP=)s8+3P|mWwzjO4MRCI9o$Y+7+4d3%4P%Zup}ZScl52gkIv7pfa#%qiO$ByU zhASvm)ISRAClPEw&nh^_#U^dHv$z65zB%>6=(RPv1#0k{S_CsNEYqcBhu!{MyXZ zRV_eE1&+_9`BSW(P=d7evLzD3TdUbSHJ7){5+eKD$8FUvEnb2oA*Y4PNm8BiSU$eo zaXOyx-=(o9gx;&ny17P2oQu3jki+9UP_VcIp8vOfp66P&l1kuNzC|7@OcNPPfVp8PobnM_W&uJ;o z%SpcWOT#k>v62_}6KMG91+{xV;;(l!j@==#5_ikW6ji=`M6s?T*!Fs!1hSe4Uzhp# 
zdS$__uAQhvc>2J0=ZJkQ$&Id%14(`M#aX^Zr#Nd$F%iz~0HgwU8rW}*-@6e}PSy%j z-RHp$sq*e6NmP#`J!^t6ooCZp@DWZU@zRZEZ7K^q;9C17CJJc|I4AQ%y9!O*37mC+ zJ;9grd84mSQuv*niZ%3+dsE{wS~(>=5OUCF#dhjupsXlTbLI4QkX$ty6SWgbJDZ)x z3D?7PpiiP)4K)?8WzeIkB|`R64x?$*G|7{rrsu2?#s|+#gBGPc3fhIA847HqLm3jS z&!DdkL<9z#!uRRnM(Qk(FPE=$TksKk$^{ z9&l#4W7ZZM@nhRC7_JO2Agd7OTiMO_enGw7}K<@4f*en41^UI>wS zHLjQ3-Eaj2V2!j(JTF`ME=h9C;wtbm{Uh{x@bX40;? zaWw$pl@*QRJeG`wEQi0aB)G}3Ig|~oxJ)bIP6@;J=NGsvd_At;Uu=okNuc^J|5Eqs zBfn^0LP10AsE5kosX&p==yZ$t$EyA@YE-4>=;|bNkNmMbzq7!b$hzk<=9^($tHgRE zj2v&hmo)CThaFwA%Mwc7=mtM`6{4y2k)dKybt~aW?X-9o6chnF4)ON!N9A&N@&vcW zNQ-?a*_vC*ZqgWiTIg|Jr$aQKVUB42NtVMF5f1#bpNk0@omp>k-$p6PC@kA-n4hvm zW2&5b+vll%x>O(}T4kS2QiZ2uHD>rbX`gE^yg{prCJGVjp0ajy8ysIBP7M0w-$Wtu zL=160&r-bbDvHW#_Xnn{t9%8FtRUv1M}N@2Z>*ifKgOF>pj0!$YW zx5r>+EF=QRNl`a8(ayKD@&|t$5S1{KP}?>chueO(L;Rf3{b?I4ob}c# zEsw2f(rasfu+{iHMHHX7}SB@?QLVZHqGBz>~O?vFTwvCt3@5-FK>~ zNPS-*N8p!Q^Cg}wSm&PLD8R!q@CO)aof$Q)go!%(-eL&Feq6N#>2$QKpn*3mI!E|w z)4nEAO<;BEogaRKvBn23mTsylBn$eW@hVsoX6rqjp?lOj)kCxPMf_SHhyxziB5xIF z_(hYM4Kw2x&e>zy`j6$;p?s_Bpm5~^KyNN#SSqAK@j7>6dhd0d@M-Q3fn}C{Y0jC zF_)Pr;<}?pgW#BZB&=wz_dBPYGZXgSfHTjg*kmj^gCBm8>r=I8YFYCJ)ZGKs4U!Z@ zGqk`2=7uI&_8r{mFCTgRn6dX$d^OhU3G`-@UXOm{3$)<6n@4z~I;o=HORl!ad5cL? 
zw=z;p6cmsf=qY|XrX#CclT|gATUi3|8wk+lJcked>CL=$Xby0HH3MBhIs<3_Fjh=lXCLl{u`2#fw-g<&uZ1v zZQT;KLYb%$Y`;rpBAf2PumaM4yKX~cAxBtzzl|)?Yw^*#0#k=mTBW~V*{eK6CAX%4 zEhdfE0OjMqd-QBt`TVHL{#M8dhi=<4E{}1azUXCrY2%*BNtEM#vmmlv4oEN3_V_3S zi(gNbJ#=0xtdu7r7KEIL{L_89@0Q=!^?0EcCGfGG>}o@W1-PQ=vixoy&dSLw5!Ys6 z8EVJVz~~8~?yrgR>X&ywx4n(%Ro>Ohu0QH2IIF(Ay!-CfpH`pAta_@1`?UfuGNOe5 QB+x5bclFc}sy49y0Phuwt^fc4 literal 0 HcmV?d00001 diff --git a/priv/.BUILD_images/ping.png b/priv/.BUILD_images/ping.png new file mode 100644 index 0000000000000000000000000000000000000000..514e94bc30af849688d0846008f30cf056933479 GIT binary patch literal 16848 zcmch<2T+sUqP7h-6hTB(KuV-YRjL$eLAoe}jtCg4D7_;s5$V#TiPYN#tylHVaGAt9lBqN1Qh zLUOK(_}Jz0CF19xNK6ayj?_gwZ|kRWl>j8O)nCkUvX41a3LYN z)_V3q+UbyIK|%rqJyCe9^Y-O-D#(VhZumr`I(E&Z^|*Rz|IP_({pmnU?i0HA$NQ+@;gdx*MRHkr}Ng zMLS7**)jv{)jx?(ba~=OhUWL8qO73!y|Cxv@S4Q~#{%eeGwd?7jS6%~MbRp7cB3)z zOnDJxN_Td9^ZheE@y|okod;VZLVv#JIBmt*nt!*AfL{YR@svQ@S90)8pF(@!gphtX zrn64I2}+7?73l))wngBbCTqP8qfX`seFva@xzSdwuD}9VByx8ZL9lM}L+aLvHi@X? 
zZx=z&URz#RA1=DSo-W@2bfk}`W&=B&v z{e3jQNoH$%t&Gose;$D zw7KRsy!cnl&fdvap4?LR!!u1ShSNG+*;4IqRx;Ziz>}@g0RHCtHmwggHWZAuWb*75 zs?fOf#mkesrHP#HhB5Z%B1h!=MNPE#j`w7@8pZmqi34}V+Q#|KSY2Ro(9h$ zZkbV~Yz!V_^eM%69YR(mHpY&u++01uXvX!`gLM=cTcfrUy6}1{tSBVIbzK)1MYV?} zdw$&S6H{YFVW8dRJ!H{NZK*s)-$+_tJLRpkr!dW$Vkd*T zn06K&)6a9A!>7H&9-D_^(B8Hq;Zwx6t1P^#f}^@wR+*DP&cw=l!&`_1-sNiB{edrz;g5Qc(aA<9i&zw;n*fiLakNKwO^mLK3Gj3P^WLWV1N&4-&%9g&< zT`SB>AHv{BjDs{31oFAYD`Ugjq0C@8(jcep^$czP+UHo?XGgxwg(UJJAHQF9)kQT2 zs#$a8VeXFL8o89bQ)y4Ow)QYB8NuP*rPk1iksiZk+-^6@xapiS!zI0f(@*-NdgjNf zxtp9KO;7L;+z-xNU(M|~Dofza43s9Onw|HicI<3ugYqS+`tJ0I?44In%%X7=yfrSuc>#Y!A|Vp)s~RHWnv~r81pt@HVT2GgL7q-l zNtJ4>ePF*PV#7EdkQg08DOjFDbs_5=ohQJ5-_<@X;;`(YcGul$W0npCeG>1Q4^nd@ z;9|xmCy%roUIzE$UWRWK>zGVHH|@~x-;#9k8&fUCJcR@YF6*2GdwAnQr;Lu*^;60H zlF>3VvfxzHgU)C1*M=_blMyDOC2mO%p<^{!96$8-U2PwY9+nk~1#~Zbo);QoEWTq@ zu1S_vDqMz}qJ46h?V*F`btJDzD;b@Y_95&4wl1l(IA7-+`juMUEjZEQ!bU5#q@E3Q z<@4MO*^{Si1rNtTp|Q=sj`V}Op*IYUNS}=VIYZcrt>}tE7f8hw#)27Yb<#slG&k;q z`xrw%>5a8K?$o<~UPEQ=GQQf##!y)X@w}mqqgnEX*=e5W1L%_EA7gJ!$MrG1&_>hbY6iE$e~@?QN1uvXq+wQ64c8U?PSL;?yF`)&!1eg z$Mco+1D^i|)5XK|O{z_OLJFBWqEMtca5#J1Tlw)Ql>u~PyF{CHI#Epv8(B#Oxr{S8 zo2%}Yujv6K>Qa_@t8BYz_Dj|7H{$tx^B^eehHF4zD}SpCDiw zV9FPRV^G`bl`h#_7VDZzrps7&HK}~1g0J~f4j;bV2|4(L3jS)>sKTkOJhL=jP=B&g zm}+;@w>n+;{Z& zhnRYQ^a~QJt3TbcPZq|c-AAtKw0VAk7We_S6#3a+XYQ^E6Mw)n#-63Wt*FnQiU)lA zz5KTR^q$DK{Jr{`?nobx=eLX0W>n1OA1FBX=1fL%Sr+p0tK&x!p20vXRL6jyYVZ12 zcIm2$YfnEp`yFT@9<3M(;;#6Wi@9GeCR82a)iTYoT>=yp6tCfWZsdW(9Q?Ia&u~!Ht|2#^j+B}P19$A*R#gYJC)7tyf(GcSoICIA^U7OUkDZ>yyY>>^0(fs z=YTb$+(^g3$zCwPizChz7MA^=mz>ZmtpY!4B5Cs5naIM+e#Vlq#4&|SZR+4(hCFaF z%GuZ5m$mgxddv2pTw6Njk+NT8v-<5ZKSA6(PDNvRY~HrkU9yKYa#9YT80m__V6Mpm zwAI2c&(FW_ZpfR$fNl@PbWjO`lCJqGnnr_X}XV`c1rdJ_< zCreCU?v!b!pz*K~Vx6LJF4-H_NWHkIIO-IL@FrECjJA5uRpG-M_4AdznonoL*#JFU z+%Wzc>Kn*;H2(yQp=IY8az6BKRbiI+u@6?X3(H<;sH`BQsyuHMOnR~ruR<*`Ll;=e z_QF{CUfyY4boogBbFpr{h@g(R{_kLs&v(v`xa@1zeXOaG+gf;8J%y+3E@#3I_Zo`hFC@42$S9_2ofYV`skEa3#X-&Wi0s!WP`D4fk$Jl!K9j`kn#S2ak4p(_7+{T{cw)hPwsR 
z@;AI24pr{lvGr`^!>fh8HonXQ?K&Cw5SN<$tlcd5q@>o%Ub*|#V&l+WPxl?7J!IPZ<~-m9!r!;hhi6SpG~QAaEL zo1oUakK+}q1S=}Lk^;Z=d=Aar3U;0Fcz_Gy6_<6qjC2sZ9&gvpSeCjfu64wjO%OzF ze1i}Nc3JEA%R{=lCmsr;^q9n!%kZY7;#ntA?Wmu1+3+5fgT2&gZpPQ)F30;ix#m3$A8{8D} z{o)p@)<(>ahh@A9K59;VH6dq{Tbp|P3WaC_osL3Bj~P3Nw?kOP_4N*b%W-+R8u@M;@G<9_UAX+GB@r=(JyDvg!bGu zBYQx7bbf_5tB|8f=;}>&J?QiebExwnf(=U~u2}!2ktx83D&Ul}nhzP%YED(!v!aqh z1xn#MG_REOMs`mgyX(}Lvi0U>*Bd+1cmlh7y8M?f;I`Kzz4WdYl9&T^GTi|Dq)gdP>|o7vG_Tnl|8%6V>=R|B zp%hV?HCd4{e({Hi>mg8~Xn8sTQytBch$ajrBcm(EE?AlHvvQ|03^mG~C^A2E{?tSe zgN{DRVicFUDG9b;Vw$$asdZKX>o9YvXRGh`4B8%lZI^Mx<4#lJ%tr0XRaAqFwpskT zAlP$9-C@QBlkYl{Vcck_aq#X3nCa0)CL?`oO<$Sx+*0$zC=)Vk<%Qfu7w2rtyFvYA zix`_)BNm;wF`CD*LRpM56_q&|shY=YQs8O_mJWxWF1S&%b|E7C)F3^4Kf&`I4eIu0 z&2vB)W?>bw_+@pr#69u0%iJ@nbS z{o?4OH8Ac==Jzr1xgmVp1_CTocEmP-rw=lfF!^=T*;xT3oIp|X7f-t~^J5<`2Vd&M zkhKn;zctvN569)x-Lh#+9}ZYEPBC@O+R8@ue?2sfMt)nCbnK@3dTsAe$H%8Zub4V# zC`{@*;3fy*U3iS|%rAvJh8@K(*eCDNSJxg~+Y(4;8v#B-TKPGmN!Hi9x#ja}V%?kq zC2wcnf1F?CAJcJUK8{GB+G#Ox@Q*LQr86CcmYmxy{DkQ7mC?#??ZDV10~t=ILnp!= zAx}5AuL4h;YPJea)pBqu=MIsT4xxqEYZGIPGnE^;KE0@MXfVK4Wb<*}HTm7wDmF3T zkE#qR@xVZ1pa}N7!Uj}UM)Mdu1tgwuVz1#xv^cibWxqb;N>)K@o9o3#;u}fL$eKA6 zH~jv?w^qcpeuqD7gFY$#;xY3q=5WqoscXoS4IV*e=J5F1gCQH-kx{c5%*g#U@%_}h z5jpZ|DOqCdp}K$W!12B}30wlq&Sz|LS)ltZZyPr54LWT-2ZH5=VMwUPKcXtdhzl)G ztNMdKn|qhtg%z^ja3k=eDLD17eg5jKFem)>c#nufC1m&OVSsaqq8qNHs%BZs&u}ZC z?&_wQSqa=ZRO62Z#h1LSHAnux*c#Esi59&)s$E;sghPxto3O)F0Ikj^FCXT zCJ?s)dWNnq1JqMQz-Q^P29J!=7rST^&E@y}2ZSC-!$xp*{rV{3WB4?56L((FOZqib zmTH(Z?TFz@4mjh$eoGsH?^(n}33y`YU7#w>#Sj=CzDrsF8_JK5FFJBO@DSI?qcs_b zY97?RilR6a*<*6J(k8I+ID z#u{<#m-7pJd%PKS7+VYE0=v4$8$Wyln!W6_$2wEC^FGsk?>R`SL9&wDlPGY6abXs= zpgGiX)mKI~D_q|*wfRt;aRdg={f&*%BhF7had`5RGGpDuFxQSsE(T)rF#OWWsBttN zJreFUq$H`1W{AvxrdrY!#Rrzr!8dMmet384_j}Jy-TV9yb7h79(i%j$xnPIUSM538 z#6oV8>PG34#7-MZ#R6iR0llJo%)^&3O#7B#;GZpU{{=|wVer0?{5H}~y& z4;#p@;UMz1OrT6Rfg@ZpzgS!Kaovkx!^#JS{EdfvOp-V;$6FZrDXU?b?bo`+z6Hmi zI7l64_*ASnyBbWZijWvakoaQw1<_R1C0+QYCftuv5C&iBQ`l+Y&rE3KtM2=icW^m# 
zSsoeu*ictDkLB&!irTH53;vKut6|!UAS<8Oo*2$C9c};U1~jf%TypN2nu+WAsN;uO zq~1_H+v{tKkE5$=kdY59{5zHedz%~sZ+^{vU5fhjJzt#PVp&3TVjc*2uAjxkdB^V- z>I_(-EM6`Cw67g_ry=^hV=qI3!>*pkg2SwuvaZIJFlJ6Kl8G9`0f1W=}m>JEp2d zw0GHu(sw{Uc%s{orx#O$2+rR{3eC#Y184y-$p>~#(fKmmZ0_nBb}NgJiLg;w_}RA(ytydx=%IMcH7;3B4c|lG-J!v z^#$t)Rrnzpe&2dyea;mke>7PQiZ}}VHVxFofRI-7kE@F#FGb*J zVhMO$;{EsA!NuV6(>lzPSwKQb6dfeW!{th!D!fsam45S!h%?s|z)^yXcg>8axNI=* z0_CZl9J^@9eT2wR!Hsz41X!DXvr*pZ$_-wE20Hv(a-oxFKrr!-o;VHcir!^>1CZA3 z;@WPhr}MH1rAE4J&A;di(IoN)ep10FE|tmMdV5;;2UbkjGUZz3FhANYS-upSc@Q=P zdULf>*bFR9aSg-5LAZJZcD}o9rGR!hjBNzo1(UG#!h>rAF786_wy~W4%!rC7j(=SM zg%{&}#`bQxWHPhhEa;BCvGt?vfMK6n6X_QV7CPhs_G(P|Jw=REHLKruWWI*-dw(^V`)qu^wWuH9v%F+wUdtL;f&OS z9LB{kk23X5SqfOc(+I~ng?#WU-`cF)`BUm-_JaQ4=zIy}OZgnmo)Yt})6qB9z?bDK zQP)bDzLT;I(>;0E0WTl(SV3}%LF0T{YIolIe(Sr^l4l>LQalG~OSkPH`rn^#xRN)M z{9=T17-c|Mxjd{(8t?^emMY=haNTNhaLjMoT6t(ei7QiC>-$tZ4dy)bsZsb2e}i+Q z7{!MUlw8K8C}$@Ol^kJ|R{$%+2tte%P)_40mJ3=8i8&5lDNI3iOPpM#V9lZ2T)cvH zP!zW~xz-5&+41-`25{K&q%#5%&hiH5Q^!?buVKW)8Sdzj9MQ7cL#EDb2Qpp-GesL;l%e~-$HD|wjPiFq`5 zJdBp_I$FA?k2le|3tYi+n(bd05AG<(9L6RBUo=}sBth_ao$zIhk-QOF?uhLYTCv2+ zI57p%YNcv5?L%SZ#ylSalb`*ZRoM|59NC-p-q!gVL&i?OZ*wvDsrwsDA_`K~?08av z7*-9o2UKmS)oOWAy>Wo8x*RgN_wP{$-f7Teb(D6;j_oUve0=><2`gE_Zvc-42D|Q&gT-Pb zmfI@=aAVx!D(gSpw_qDyau(sDw1M<5iuy1*+3*}&99{tF2it@WnYtN5yVdZ&<}l15`1UR|;QKJB%^X24w^6)B`$&L0^@YLY=2dlQ@OgSD z=_TAcgK^c}C>bM~xxEB)ir7B#b*eN458SFLj@KjH$iJ*}smUgMtQ|C!``KpqGQ@%D zVp#vJj>$xZq&W`72l?22SP9krX!JNk8Ka{D7~1a>vyCF7*$mlP3S+xJ zM%vJ2*T;|YZ^f%#jnd&^yHZ=9($^tnc5iB!%RWbM&LP6Rw5740cL)gktm!i4nnDz4 zLg7!fyc8j>XjBGj%?_w}VVSD!O!=(f>GYZ)a_F#@y1D3*0!d{sO#+ZEN#n@>g6+`C z-iuc1*^h`b8Dy}1hGClUSKEy|l9#2rD@vQ#+{!auw%AGCptJjKkh$c(7pW%k&eh&q z*t3AaZr7#iTFq<2>X&bk#_m5c4)Z(*!Ik~FzEgvJMp&I)7#Gg@5RP{6S+1n|*ZkED zq25T;c5-_)t(f9w5C+r|SCy%R&{gV=D$~680MF`ZstT;Hy><6ssdeoL9#h$D`)qBk zkx7idK2!wvj$BX@t4?3|9Q~;Y0T8|HS(&UpJ?*BD9AdN*sp&5up3PN?)?T`h z5QGm}11;aU9n3C=m(BB;xP*t~&QXrfQdjz4Xsy^hr0E8S+%e6WIDj2}bh3BNgK_yl 
z)cp(Ub$>afM0E3Itdz|Ctkgfi-i*`2(c}=~Ai2?FGF-S#Rl?~364AB{vjr<)L(6tw zmLJBJgB39CH)nNCt<-E&MNVhQSYS5^z1F(Fqn*b5#fN#(1O8tk%4!}jgoR5P(@t}_ zPW~!LlcuYpBBl@bs|UptWR?x~;$5bs*Q&KYT?&i*c00tjzbns6qpd}$Zu0Q8Mw%L! z;rqO>Slbc(awiw_&oIt>JL&g`GKoec!Ytgo>6C9{BLcM_&(FXEC5K{JLZX2;xB%6p zhM_@&b~FaWZNM785)`#8NfoCSgGp-FF)~}*jKAmZNSwg7A2Sf;jWmHof_v-!%KWlA zg(W;|_(sNTl#qQS%4CgvJ2!Y^rf@EIzVre{EOqXqZ}qlzmT$(dBTyWE7-OLHv8YG* z^$$Df%s)dH)25YBIe(DQ&FJAIqgCqd0aj|vEhNI=g@z{i^qY5@_lw6T3e_6iE;skn zk-ExhMfSwo9HwjT8s4&3;+e`bfpcfMa7eq~jC)_DbWIU^J_uRn?8xpPp?>f6yaNqu z*<%ZIQS^)Wyo1tH_GZ+I+zNlPH*Db66jr_m=Y zpg@#Uk##Jrjp|<6r>j?p>Z*LDL@BNIRpCdqv4hzUqab&DeWC-ntvBW5E7$fjTd|!K z5}}L1i)>0xz^X$1P5fBDGR_ZL4f=vz-ctUI`{0YPHS8m?lUW|MyEruXWoaAsVm;9Y zyvSNrw_O4x-h~~4h3+B~xuD9Jq`-Z_`^)E1KO1!lrOWWiJC6o3ZW5CzTXuH^Qrc+A zZyndxWPRUQf#`GZ!o_)z-2%&zNRoaa#T}Urjf{=eH{2`N!Hr>VT4ZJ?K9O0Xb+70o z9wJ+rMB104CfJF_D*PExZp}dUe}`J)yZ-^T$p1nuDyFf#A>h1gF?uzFoeg@M%lr|g z!TIqH!&FbJ^?IC}R=>y7IG-u^&A&*Coz3^k;gbKqf-PFi9U6T9FS)nwu{Z>W+@Wae?>x>-dZcK5*u5BD zr~uDL7z#&I<{2Z@^lu?6s24VlM1B%E$q4qzb*=90bktiTjXZg@T=YoQCgtt=#1?ng zdS7ro@EBbWc`%u?0q(|*V!kc(U~FY#THa&q_dlRGqLbeNc$uRh~7S*@vk6j zx@6t8u@t9^5bJ(FMwQ&M1OW8oPnT$MOPMV{3i*FdE4?G?Da^TuVKx!e-0r);kAjMu(J!iGnc!ZI4KTjeu^)>`ANmP(x6ENpWR4Ys3z`dZ{>{e0xnLhx0u$GwgxbjcV4t#bt-U zsVLK&O-{eG1Nc5F@T-vPb1{m{u^frDLyyREHsj4(!;~(gbF=eDRTMfYFO0eCokZNl zbG2W{wlBC#gt@W87x(cBwyALpclXC+$X%v7W5w=ASz0{WgadJ*++eXI`NRw-gn>Db z7wfIZh+cHI<$`DsW45)}vr&t0;)>k{9^(Dzn%w)5R;$6+GBf{UI93!hISYZ%Qxx$4P6f3epi z#i)?rI`ic_DF#xr4M8pe+4V<}^T%VFr6)$FDL7DVG~P~YB<6R1_Vq>*v}w>=PT|2> zewKIsx>wkU-;xjc?iv%RbuHX}rqvFa@C|BUgzX;h|B9nN;#J4*XXF`6=~4E-q_Sf+ z-eSW(Be2p>$JXS2u9W@DYo!3LI@zhKwG?W-S&051`QdBf)0}px#jdtAB`0N`(PKmE zD_hsWGq*NiAT=E}JBdDSgcQ2<9(NNfu>On!_N!rB=TKy3-D}GwcOt?}Mv5`w%iIm&gXXjPHd5J{3BPb|07o|YIOF!B-|Q5o^hGT2^0Us&YFe&)YljEx)6(-o=G0`(WMN05AViN zCOG+sQzyerDlZ7t)eUT17)AB$7d_A#E=0+nuJi5+qRw!XJAsQk9E#%w7uQ z7ecuj+hz?|v}9<%{jV+ygXwyldJFsrKI3998Th7joy*p*>nJ?DfJ~9mHZIvX>e9;O 
z!5RCm-nl{_E$)B%=|iyh9(z@h@-Hzx|4@bp|1U+B>3Gs#3`e^3j1x#}WiVkI9%ZZe3{Sg-SeBjY4?(SM zS}o%s*jpc=WM_lVclSIq*;VDT@_|*qiG_7ogJI$eb;*T?0Ei>CovS5LWLf_fSt+b$ z7ygK>)$B^id5u&*^Bky-?4$m#*SFUJlB+=>za%!IQOTS-_}H!l!ZV1;JDVW#$>nFe zq{OZCBpUg$*_XPZuarLA`Nu!1tmXe3m35C#%ZANLwfSAd2#`OaR%P1^mq^itxz`OV zgbf}CZzY29#?(l}Mw}zem(2UA<&boIt@eYYegmAjp;Mgd64axYM2M^(sC4 z8Fz61aP##|e9BtXSo?n`D|zVs8;%`Q8OWpYC(?5dG*P73OsIO+6p#q_88E@iaf_nY$_EH}RO*cp%}h)3hqjH&nd_T! zPWdSU-)8JYFD3H>#62se-IAaOvudAXiz71PIeNw?<=n^*S+3Z}n$7ap^shf_{&jjR zes`cB+Q5+}9?i#-8Us+|2ETb0b?B103*Qg^xZU}*__QTY7SjE@gH%ivVR?w;R7BR@6GSq~*g>Zjliw0#lyJkc^} z_P@X^dMD@r|V6x2rwyB`HUhzh$TT+;05z(D$nE@q~xsw9y1}cwwYb z<}Ww4$&45GpH4WWzV{F4v+ImxYP~P{)bh~>-{px6OS)HA=$jjY8HVR*mn+e-TQj`D z38tSc&A3^sHqW0+*=*T{EFODX?3}NST+<6b1c^ir^G5nOJrcbM8i?P(D_|rDh!ttp?B^q90NBLlagcJwG?0Ls(U7PrFCV2%!CxaI z$DO0V42soTKW6r=Z&*)d5~|}` zD43jJ>9ozO5A;8}nw|#3;jeM4zU$0fm_%muW`+4$z&60_=26*S@;PSiGW|&o|4vw} zmA_P4AT{;Bt<@q|-x*=K5eX}c#a5$a=fgoioTDIGlxZyvoHoA{6ZbM2VBGAJcj`F} zx=D&YFSz1@TBGj``VZ;hTY3B}c^O%U&%ys8tn)2dQdg%XwK#cS;+VnE5u9iLINH}i z!p80W=-3|n3o`RL1ln2N)*DvnR;xH&e)!FBx7>UjdR{*RnC&aNJshdDHU;-0#$Jhq zz+Na#rX4|jL!ap9(m`i<&~kKZ5>ZogW`HO8H_=JEQNP*BPFFWKXx=zcOgHWZ1?ExO zjIo7O>RyIOj6_-P=R4s)dv`tBCtF`xpUYw}9B}Ma`7t|3#SkDhr3T;Lu4lVAneNf! 
z+9*eEqWy3J7g~GCAYPffK9W-Ot)~xz%v`9FN#J@G*i`4HXy&i_l#TkP@; zFN6dcustKnFJ-nP^3ajPT_)acAoM_SVr|cqJ_3SL3JIg#+fLjhia+>4EPECdeo)AZ z+aE3=+=zvi`%-Unb~Jl`9|L`ZX!h@Zlevz1vN!idPlSdRJl>Pcqwas_iZn4L^!|HF z7??mw6@c+lGlG7yW+PGBCia?uVRi>z*(S>WSy(}TSET;R36=KdUrIf(LsFkcj4l`x zZkhi?u6R5FImE$Ad?q!}+9%UgkHb%+WznUS6RHox?UTC-#zEpxK};;?oM*ai%t-%z zVwp-(zvOpJ@0`>8Sf&Pn?msB07u<%}R>VMgr<~Cb2uY`3Nycq& zl|M{Xn$WYy>W_8D;!V4l#!1u8I!oaha>KoU zNNU`uOX9qsk4&d#vXL+a)91s5$j|Thm2G44WB^LJfQ*;hcWGQ_jb5d~@ z%l+pp@55Pa)}s^|t#Yrr&fq2T*Bb7r%BUer%=3;w0T9J?E3Tn&-LuKh8<)iz&jll< z*-}{Q_vJ7Jbi+XZ^^yIoxw%`WgrdvA6mx#ZVbcO#a)c}yai?!sbU4*$#a>KUX5rQL z|5;L5T^;2fk=uHJ?0Hg$Q?x<_t|2-}{UG>hk-fH1SIv#MQ!Z7ss=3z}7>zG56&PR( zeCfPGrF0Rc-3(2L1rA zu74K>hVS_N?-^=~K^1YjQFoL-D}((7_Ueb)V3P`AmAK3_7#1&337Mc^CY-!IS_2@~ z0ko8_UivP~ik|x+Kru}F`jiMy%kuhOa(tR6^@FCt;#3Kxa}pQHuB1*!GD!v*xF>Ww z-^6y6$@d8SxFO^NXT|rFjXAb|99}(N=260U772#tgp3qWU3M%WtHFlO?MBy8%}b;i z3XTCs#k*5;*RklrYd!q*M#EUS5b4PGZkLrL9RlBiO6rVd zp*_2;;$r*vIr1t%tLiA|ob(20ZRM(;<$9?-okG!5w?5=9<9{k+x&AI=t>Ho^B)G+7 zSBrdau2x2IIy`6Y>2e+cK`}Ji&GuWteDX_>iVle05x7Vk)xI*Jahjg@>}fxz8+2#C zsOkQmYBga5OJ;U3C0znb0z_(W6lDp-K>~*w%0R@1O4u>*+d&~r@Q6(g?~?9nZE0D^ zFOFlnW`Vg#caY`aI{ID;<4CMm8%7E~li*DUzP&MXm>hgq@>9^6vhP0guCaL~A7n^$ z^z4!=ZzR`qpJIVNJm*gpf+)8&k>3cX&=J3TLIkg@!+?+%BUqS>2HD+%+xh1^(Kl!N zCC@!9w*+WYjyI7{+z%v_*B*`5Kw>Zq_Xuc>5k6*{AghvK(I~#7Xy06DbolmnLv6Je zch%&+%_V6rpQV1QBR3m);%_tO*JfRA`~y&3ZOnW1Z1J1eqL2&jA%6hsl}_9rfTCFZ zmjI<7(VzOuI?1 zg(8nf0)zhmP_I`00icG-;vjChP*ThR*bD`*1xMq&y|I%Bh}MQ6>iEVzzzZP7RKaG} zm6is6hXSs9bO$a{4Gq_gB$niRVu5Gy6yUm#HFk0@P5S(GJxKU-< z8h&<})#-(-2Oq!p2C8%BC4kc$?4m!vj0Y_8pVil{J78HtKLW4w0IK~EunmvaDvt}> zcUyja@xle5XtRKIC`xK`pqYkf{>Ej8@PW47k_Y`cRB2(?yr7pnam{ww{{J4FW)~v; z{|THP{sE_6uYU%o3edmG`Bn>HDk8*-l5Ph0Xq}1`>h`v^j~}9PKMbc!?U5NUm^=7? 
zqowz(=d0EfFP|UaZhT2I4#=Il{7w@qKr`KM#?Zu^KM_skspqqj;@)4keKWTuGpqC> z>$I>?0rO3#QcZhz={s>&!(jFH(Ike2XzEbmuBiOCwX`6uX|D@VbGNtS(000GFSPN{ zFvH3kFIZFue-nQsf1M4=t|4Q4zWN4b)-*>s@qp_)3q-$rFmold9}0)%*S7VI@;=>~ zw{L$w?Jr7fHhbY)K2Ln>yCL}nCn*@08u_PzUb}VG7Lz>Ngs~&P_B0(jjHaBlnwAln zq9u1Y>!20EEH6yf-??JqB1@6X7w;h)^nqhvhlS$ zSN?^9y7ez;>F)|E^&b^f)qht|bQCTkd{Mv7PON0a^+%{ww!uL)Dj`+xQobN&1G6>^ zqhxJ@uoRGU8l3kXk)& zI8#Ei%L_Ts$`QQ5|3-#Tif77H|7x=$zQvIc$ ztVLZk+laLjB*H>=j9(M~Xv0c;hM!OzayaMO?{_Wd`cjQ0TNxX9t;z(osJl4X=2J=U zZh?#R--^HRwtkc>J}*TOczr@AADEjA&xw=w+2b2~!W6Qw|0r^5`LpTzz!xeHoy&88 zvL~4sAG#z+Kwrf>RZs%B^CP-&cm&dD(F0&Ik&HR7lq(qXIviv4~+p0V~}(OIGPMFHRg%3gq>`k!-8=rnz-+2!fT?H(FePT<;ke)aX9vF zAQg@xH+W@E`X8W4_761eiDKOU8#EatiGpPuQLnu>g$V0S$qzY=8RU7@5=trpH*apr z$2_pzF2CL?|cP%Eb1l`p>ez>10FBA*YFVc)D!UnEPgsU_a;ZaCTi+{!zjNY zxUvT1O1L>zB97gFV<0$9J2QgHMj}vpTxN9I3V?sQi*~RBV#h!)pxAg|5kSS$;e>9P7!-8_-{dfU@YmSwIz>zKk<5dk|&Dl3dM3J z??1jC+=5-ksSbc7Sgp@HL&*BOe*km9Glx;#z;3Izgq$D5`ra0&CSD824uSClXCRjt zzN}&$;&TkWsa^`%bF*CBi}K39mv?!FPG8%~Yr$=JKs7Kh`d0$DMeilQB+*EE5sf73 z%t%sZpBc$%v$y1j<(KX2Ow;&lQ0r>Vjsz-km4Rbu&D}IfYQf(|@&WjbNz{RF3>36R z#WD^0sD<{=@H-vpN6_$gKGH_)gMpU6Y5L0sc2_EdvTk<#qh9kToQmnnc>7WPYqcsF zHHrDvK*>UR(zR1Y)N0h~ola&6NpFl$Yer5QIAk!Z+6iUds5Qe76NLOe~3FK8BlRu_+gX zCmc*in3-nNQl*2?%t_ofTHC`N>U^yN!d+9?$!Q8-`;Xc?Z2d9h#Ef?5hk}BBQ}(-I zh1x@n*|h`vqsJ*ef(D+b6Lnv t95M8*r_dHOe@gpTi!K)+uJc#u`-4U96uN)-?AL8p#k2p)@b{zQ{{zu`_%8qe literal 0 HcmV?d00001 diff --git a/priv/.BUILD_images/rebar3_shell_end.png b/priv/.BUILD_images/rebar3_shell_end.png new file mode 100644 index 0000000000000000000000000000000000000000..ccf350e03e2b016ecd9bc2b0395fdb1ee13e1223 GIT binary patch literal 34875 zcmb@tcUV(P*!BxHRMb!eY!GRpf<&<f0RrXErLCY){A&x5fdux%@$E0B9(*;9zABzUqrs-a`F1vokAuH$>3Is*b*?fev%Gxw$nzzNS1@5QCh9wYieD zv|Q~QWPW{yQX-x|g|EX2i6WDM5@Ma4rihg8FT~I>dwq*@Hpd|a1$d-jpVwllnbzKl z$=dQjrP_MlZoh=xQr4UP4XXvt)C+zYf+*YBzKey)q*2vTnad1 z>qxPd>$>GeuT>76DoUZOA0nwXIDH5*7`SVrU~T*^wh==JwLyn7kMH+}(Z=GO);GMW 
zN^nNuWHp=H247eC?xFbE)FxsU6`U$^LiYtF2=X>k!Y_zP|L)3&^U%xuY!O<-2aEXr zcpjROssF(_x<7Q`Dq;0$o4k>oT&I@eZtg@S23ar`zVE1CV_+Z7s5HTDv=Y5;BqrW& zzw=C;yeM$vCJMhp1U7rCJuSkd&aQs2`eUZQJRERbUA`SZv=MT`g1s0tDl*&mJ?R@P=$$~Rhd2FD2&Nu zs@N#Fk5Z%WQ!K8c_=^2X%1B;SnY!fnw!x*wDXwYsCu0Ig?(eTRmKPOm#){6%Tj*Yy zoG4%a!vOo&<1pQ@D#i9*xtqr%7!lP;^WPb_=qBk8og}C4!Rin^Yk%&`1iPL1r{Gna ze#I^NYm~@7-tO#K=|Hg+F~4(oVMT(Sj7rq~Kewy@({13ktrvdAJz6SI)2<<{x8=FY zulp2-4=Rr{POg#r^ZfPl*h*KnW{A=qNAe2c3Cfd~3@xQ4am{_2 zWxF^ODw*|auHuTZyL2sLZ~@9cFLM0nQSmHIS8KUopMc=a)(%((k9Y^SSQ5L@ZtR|b zH%N@1{?{|>VA8z&atprC*;S1#Doh9l$!>|)JU#G&XXL-V^^eq2d(-b^>Ty!fa@E`b60oi z1X!ins7{yjiIWt~F$1jCouSR4J!EXCtDAtSQfln5{`05gy!w4~1$YD3p_U9@G1S$e zg4Fc#IQu8Zi-zKdn2I&`>(?G7XzyMj7eeKOlVv21sMCkqBI$`{5$8xfN}iNYB{aq>-;mmk|`LXez4dC zDdT#;I^*Q_&_6>RoEi$(VA7%UYVlPiBidYb>~*pu*#uvGmc)`lwM{i#L%}6sr>6Pef=`0mp2sh_i_h2`t6cxiwu3`N5yhe&BV~Fohg{sgT!vrO5)-;LsKdGyDJG|XPrOxiJOk`t zhH`$&CGm1 z*u!#SJ7%79=P3cw1xa+#Lhte(&Ryn+Udh&MS_LdICpYq`b&d%K2r ze?Q>}Fs_8x_Q{C2wn!1zp?gX-!R~@@Yp4&&FeHLyGa_%(Uc@C(SA|kef>B=lv$bBD zK3c19)n2+#;#2$2V>bPqwN>U6FhrqE<#3CNJpOvZJCCgTl!Ou6x=>g23hbVRoWWe` z!;=%{1QNMEtjJBsrjUs%pns2XLmdZcUhN>#i}?LKbt=BKTzb?n(RegrS*GXLR+KUwF~`8!xP6lTah7f*S$x#{}-(zo*WY z`Y^mo$bU-}J&ffb9)n)UP`Zun2-|w-4+=oWfSj{V*s$%-46UH$Y8m_A-&#p)f`0?+ zT4mJQKg-x!W$Tsy;rfV>_0vC?71{0 za)kAxDk=;9Nn+9OJdUs~%hOKHvFR|EoR7O&=RK71xN(>w)RVt` zA^W+}l7IEm=1eFW+3dHYxU1?yQK2Ajp}4cSj*yDFGwl`vbFV%wESg7N($&2rcT`lF z(eH7GHxZ~!OZb#W-zb6|l~A6XJz#uBD`6Gva^(HAq zl#S#kop;Av*l^4}vVyaIGi_AZUewhEW%5VIe5J|r6e6#u6m|)*rcLO%XoW|CMdR;jws$IST@QCc?98K4>c6M4#@}6w<}MQ8+N;z*{DPA6U-{rw()z1N+(wL zgbm|Z;tFS?cR)ykRtt1(>(lR6#+=%XS6T(PDR@v0UpFgm7Vy?+4{YQJ7I)w-dw5ze zE6Mqz)Kkd~3%QUaKj>l#kpXM2 zhvBVJp5|Fzkq(o-#*W%vMU)Tfmq<}fqQJVy`er-DmGM97vlUjZ5mtRwA9a9fnnO`j zFdDP*dv313fprrii?7vl^hRwqY<Cx;e)qfYDd+zS_Q z*+KfnwE9yB%~ELl)w4{nZ>3$6A2(RSG6Zf}hQK0OrWHC^T)UYVOWemSrX-jX)m8a% z6aTq8b7ymnu3uB+v|u|W`!)PMUX}Mw@!&h0Uv{xy9?`pU6w;qfBAQu$YtuIFcBE|C 
z@7&Pt3NH67gVt~PVMp;FmYac=;kF=!k6yr)nVwAuQ~E{~-b9EC^o8gx0W(urW@%FchQbE62;d-QZ|y*6LX~?*+n*zRu+&Nr?JT3xnHu|vRg04|I zr-Aisme`Pl+T?Of4OW5LWv`DzE)faxBg zb6+s_b@dJ9ScQjd#`Z%!Hu{`eE7Cq%u{g_4U2Gdnd+0C0HM7vqL^y0nD)d)RM=tSs z2lXxqQ`{3)ZuS2QykFyCw#GAwN*`V;|I1Xg*_!q|d)`r=W{+`Jb`>;-k)$$B%r+qs z5lA*Uj!$IucCAx}X*x(dKtj`)A1l;TMh4VhmQEaGPYg;#yK2TnJmM zqfan_)tqyj<=?HYZU@Vc$V)>uMZ{a;3pz{vTS;f}5Yd$TsiV~?nm!5xKF)=KqQ50SYxa^z)Li^ zWYb4|t@mNq610(UqQFK|PpPw>@!(xgSii)Q&-81BiF?!mn7Nlbn`zY&D$Wg^xJhwF zr0QSU=ic~4oX6y8dQ($tUValk$slZjOq&bFf{|j1gpfhe@%y*VtmRy`!OL2;4F0w7 z90xs*Zh&-^;yiPQf}6k+&n-A-BbOMdkB9m0i#-$5xfqnx@$1XHa3m|QCB0tFxQ&~Y zOmd25W|AwdW~ZeMkV{tc<}mQJrybTuz(xpTE_bc+ z=NU%&qs_FFke95whRa|Qv1&3#iRDNv3J39wR!%QI=rEd?HsVKgF{F>TECQVT2bX%^ z|M%A^N$k**5Vovnyx<`nZxA93?Hp$uevi@H;st)$OGzUhRE^hWPTKBXry@E@iRu=y z)ICKUey9K(v5gcdoR1xcdV|%b!Sw4}gbX|gEfsk9v@}5^zVhLufI4UR3sBA-jBEG< zC~eT4(vCWEse;@PFvlpvs}7HgwSW@wqssoDf5eGyQTgiEJ)9p(wqIl~IFp1%nhXm%$I(N08=jgcUo_LcK+v*nVNsyjqE7Nu zHfUDBA+2|E9%rLQG-!gwua4siF;=AQLZ`r`WmG}6I5#}*sDu@h?s@t^k5EFM98PD< zv)3Z|T&~4gdT=5{fz|N`dC#AW{4r@ipj;>mDlE+yO5dHb` z>nOFWT_|3P$D3$p?)F=c7^5n2AJ6TkERyxMcq{nzLyHu9wy7xkV24pgU}Wm)gS7G) z=YgWYE_*@Xge@Mb2-WDX zYjR`d%r07b`F$DEPV-Q0%U*yTbe2j&QzF1OUYNGf&LQegAhZSEGTSZ2U+|xK@p3L{ ze41AV#gAWzIEO3qbA`LDqLp?97;dj_uyfa5TZO_sEElw!1`UE?pS1vVd1%Kny*jvS zJ+>!=bF0z>Ad-^$>=zT;BzC@XkKE7Z3o0jp#iR4x;n&;1S;m0Y2Pv+*tkgP<3A;&%^_*)26B)4g_0{7QRRf(ZW z^K966q3;vygT(3G!rTzf(FtNbxGWO|U3e?FG6DRYLmJ?ja#e5b6Qnz;m z?^pv3B91|yJ58~ZawM{VmP%fbuHCi0tR#+~gH8yEFU|#>9G&7SUn_9i|o3TsLOiEPa+a871uO%Qia3N1}prC(G z|JXuNlXW_{qk44}(OYth!o6Nv3{&S;NOvQ8349BF65m7X2PbUGgVCHgq1$1M0yE#w zc$?ZK!6RW8b}D(9YDz9nt9C1~!B>DlEjYM;+I3$iB!Hw5H}!$?9^ncqoK8>t9?-sM z@{J_AVvq-nTy#Vf&$OZzVREF-`zPP1h>rZ?oAhou?36gxkFFiJuZxTGY))@agjnvf z@DAtr*yv|AsB*ma&Dh_*CUWOVeM>nqsL{M_!>j@)w3;1tw@3M=)VT4t6!VxJM>lj# zheSS&2@o3)tnS1w1#4e(s?GJ$V_D#fo}9l&m}RFA5l=@dDg-U{~N&nzttBWAE|aZC*_37<~YX&9D88NvI=a#YkBu+hgQ+2kQLbXf)|4s=AC#~9+0@IKR|XqRM_#BdcF9eZ#Pn*!;a3C 
zmJ{>r+b?LH;#Z|x(Ue@vQ^`~k2JQ9eW&gH{Nz~NFd2od}-3Hoh`~{&&KFX+ZPUDut z?<6V~!S1)4#PV*NI`4QV`E9>%bEOc?^u;bmG{U4EKUA3s^{(V+K8+|LUg~aTSVdfc z&$Ay)2n31^3P0)}A}>J?A;MoZ$vd@`=EfG$&JR>I`A!mFE0TLxrP(GtU=*OF#j!tpF%3&FFwQGPt>e4WrH#kc*1qMo1403U>)Om;OQ)*SHK}Kndf8ZW9nykYjaL9w)#vzFp zkBJx?Fr6+_=^~F^ePyM7#TC9xg)4@dhr;Vr_CdYPk9bc7PM>sZWM{c*Rzo`*z+1qD z9p(6U8&d)JI!eX)eVSb_B)T{v|)&$$iDT-F*p4M zFMSU=GsFcW`g4t8-)k9z>f5&B1Tu6dK;>jR)}lr3otGu;4>Xne2N`<_>I8d&Eu|Zg zeVc;#VZ2W(648sf2x<-VcCuf@4DsYL7gfv60XjVAa;suA%X3bKNeUk@FV17bcRAyf zj!B<1)+*&JlJD_=UD}gnzL~I(uN$@+DlJ4gw$rtb^Ew%XyrqK^-)^IqFmc0t4&$T1 zWNr8Nr#-nsuR?M>`FxSSVMvwcFqFgwBJ2q~KmoWF0)AB^ad$C9_z zx1GB`j4$4b!6?_bpP)S0FuPk%0l7K?nLkk=AMYY8p|f+u&fzNg%c1X$OU< zC!@OduSf}CUuCc--QVLfYWq!}`YBSgnJ?)vjWe^M$mlZ&K%}jT(-YBBbgy|c-{GMM z8MH!Hk-*|8!2@F{@7Zh)0Q;c1YZ}Hak^|wHsov~Xj^tb>s&56UaS{df%mYCOrZAAE zLF*B7oHOO{xc`YMF+;NpdTLLNqGJXWoOE)SmVrkfvwLaqUBVneW&(zy!?z?%6(YWe z4T9Ne3x7k8{#DlL$C@OF@CfDkB4$U#BPH9w<T^b&pJt@?uU#kptXfPCTZyDU;dXj*Ggcwq? zE{rjvRmUg~WR<@;Y={&1aSkIZecm%!GlYXHx;brF9u4IK16CS)V3C9_!z`BPph+MW6{+Sm^`8%+` zmk5CM&7Cj^Tu1lyDc2xb5q1seo|J;}b~KmXYPn_~t|PH?;XGum`KdBNyVXE3e)kA@ z%O_B2%>DsE%5ZtyabJLC#{AAU-&XOZdd^feiO25Q3jRb>pi=kaMi@X*a(U_C83q~} z(CuU+%sOA?FQFYjdDTY3<3ySKNt6SP*>UBkY;zK6EsQThT2;wEMXtswJ@bNp$)*Rt zUOdwDg*?Dyk-)37BBm}Fy+4{~G)a!N5y%^Tp;U~ihUxU(isDO^w+(I;Q3V?m9fmJu zNFC3Tv}^dh(%o8b8GlgRUg*B1`7kp*XL*mqZ^))=(;i~det6Iwo(irUih3thzZVKbx`#^3p%sI;5 z<}^7raggS+`^;KpP|{x4L3at6PDJ?IE}L<~0|@-{i_fAB424BR)VP$sI#~VGQu|II z;=3>AtMJfQbWR@QfyzAXmx{RR1L(#+2pw}w;p2g)E2RLbN)BDJn(P|IRe8GoXLwhW z`{y2d`?D@h>rhH%6UUt7JW^278$8yO-v6lqvKJqrE*4TafccN2M?g;S_ ziYYTDItx;ZFhS{>=zQS<1-n$F%C`~8H$Ysi2n^=IX?b^Ub-?kp;d^UTLQV(nq+Pds zWtG-;@S)oDFZ|K5xVnr#TNPz?rAC&?FKN9mL%xHry|MNw`GMO;pcDDEj(n33Gb#FF z0pJ^PDfdo^D^5+_G{9X2T>&sJ1361}JYExGK6qmx72$9oe|=>yO&5z#XJ1B&-LrSk z>i2i48#DD#H0$D0+pwW?_~mf6Tq?(t2ICNbW5FnTM+C7@PLAPU^hR&?fL9e-}}SQ5uS?&rD@H*blxL zWTf?BD3b2;UIHEWO+el{s&uQ%?wF#m2Si`8_ALjre&-Zyp3n&{cMx{A8x8qu&P;Oi z>Pw76_^mXdaMQRX+xauJ0K5%vc-}3c7%uHFkzW{)G~9x>F&M~KU-EeiI|99;Ut#S* 
ztKyh6MZK|A!;h1}D22cc@T+vwGF%H@My<=4SVRH04sr;WF?lil&}MrXx|yM*m4e#p zV-c^cU;BUR2eP*tYFrG;0diMWF}?b0b&s7#RMgX-4AMt!kfE24gCC-R-6mg)tU7p( zZ!8O$)PLFmnzXS`IK*F>e2RBC^5J3>FDeD~OYb42WEn+gd%!1MABUQZgccLE>$A5? zfFBL>k10KRZILvB`JS8c6CQRfGY)pg7m9h-v20!s(%pMX3Ng<}P5%OSq%jqo>0=pP zzQW2#i_h~$wjJBL*;RRjDj>n+QE0}j1=hkgqj034Hp4!7C#a$MVs{7vW6|4YDW z*azvBwE3Rm&Q7a2P==d1x3%0ltry#nolHGQQ=m{CBa0S={~%7fv#a02Gso~afP;kD zZXrbX>T|+ze-8-*+~kn5(FNy`@q$k2a%3ET*CNAN>>QPW4CTsU&x!n-6>4I2%BJpx2v%Jsv!?VcuoNF$GS5r6224%StbSIwLJYZ5f z;kFq&es;ISsdV?s1osG=s*t*!+aLs%@wekKDHD18TW{Eo+;9PU zE$S_$Mg3V1A%5w0#4RTL`I|esmKxo5uyVdL-za!RzBve~t`%~u&V8uaZRRoxdg_#3 zbs@Q;ZLoIZ+?I*7soDQTHxqK+_&FXteVuabx0R$-M^Jnd3=ma(}Uc_8Qr_v7L|yYmg>kP@)kE5DJ$d!1zd z)lgs#2)|9WgyRh#eOh9-lKLm)zK7XA?9Vx_v|)`UXHF*cDgV;dh5gK*m8jPW_g2*1 zKJgo4U^<$zlfVjtaY-Pj+ppVW$aAF}ig!HyVfkBGRupF5Ys3wehm)#HU2(V!m3Np^n1yur&C#`;22k zzXYIxR_@t2|Im;NcMm6FGqaB=RV0mB!ST_;8(;7EWJkjR!scM~31_#p+TuAvj~bz9g~3L7g?xK$sl zw$>Heby3*wwEvQ}V=3q%L!Ty(C^~(rHs|^^X6*fXrkU6vygu$D5Z`!&k0f^EaoQYCxwPER^`+uK)uk#unp` zxZPPcIKwC`^ts;=_)9+!T!tG#$FKSwv#H&#Xsf50htN@F`tE(!s$I#TE!&_r?@!P@ zB9L(jsS^<#GRe#-r*B(D+tz2gCCDMAR&&7)o&~SS7piPazGXQ#$Xu-xQij<2HQ_#C z+Fe=`VhJlVawtqVw`!SDi1B8NGWjF>ke*XY^Oa6JN&kO93Q>&Kuaf}=X3qiG;C5j4 zoPW^b|KZt5tnWI%sHuhcmS|vjLhit2n1ItQ2XJ`Z3QsGG3pTjzm;d4Q^HOaSp#H$Lz;z3@2c4B5lb2E!n}E%D$Bcp->`)mWk>taR{LLjm z^P=l)PDGVgGi@uxbvoxXnm_52tNJemg9J+) ziHk4q0rAU29B=mFXLniGfMy0iWA1T2hLSOwhjbzZvUEbo%k;&CtmDHtPd(`CQi*yp zwG25tugcwVw2-_Ilt!k^lwCjTgo@%yBZ|J&s<$x30QIHr~Z*x=9VRvQ>>nrkN(ag4xq5)>ZF4KvssGg zUBLgiebgM6h{_#B33KBQqWhYi!PR@lC!X=E#?2=p5jz16$XX|l{cWN-p0(zGxb|(p zADoLcJ1I{KUe6&q8Hf!0Y20mhF|@S=iP@kDaCXf`r}lDLE6^CaT&3zd zalU~`JRi5oL{xjUD;Y&=HYfHdOtcN&e17;An@$SNTZ2Aj<}P|Yn%_IRg^GcweCc9g zct(+ynEUkwu=FmkQq#KweSf=FHaNXo!GU!;Z&2aX?2h(%iypk;^hGFBV5A8fQN2vu zeH`j8`JI?*m31%l&cfI_8@P9BH!gj{K=OdH`wnWEo!}*9F^sIKr7#UgC`y=8a*DY5 z<*JuWU%Kr(!oyBaK;B*3g_9p`tGT$KUjVz#_eM(Tnt|QV(;|V*1@APUO46uCCP$7? 
zM&%D!<8fojSwedbPPlJ`_>^660(%PR%*a)VDwj52B)#iZnj@IKtKaQt@E__q7b3L< zbL#35;vFr&7JP-&RI3zEhl2lLM{%TU@g>U!^PQigo_O(|qQ5#J1*Z4vcdj_v?tBir z5Oa!}S^qy5brOb3xf>Iyi!HSI7Pr&$a{B36Utob7b18I&x-gXf0D8TVpc|8vjH4KC z6&P_eD}?*J_Fds0Zn*U`M?qSyByk+fj@fozzm8mMeR*d1m$$^*=D^v8U6Tb?Uze4M zJfmw0EB`Legj#+shQtj(m{de-;i*E9lahKB$gV)keSOXkW;Qw0ZEq?1LSck>d;cu7 zM|+^Gy(seSpy%xWyEwsAk!Py_Y9s|W7e_C1LIqCPQ8F^Hxv zh;h#D@ehh*H0PMk+D3+MF0K6!Yc=TSS{RNg zaBICU$Nub&ZF(F^ozn8YY2xN)Y}*Sj*ojGNL~ZDJ@fPs@Lu2~i_Nj2*)#Do)TWIxs z20cgH#(N`yw4r1B5Y!hnnTtq}Qj7kL z;|Gv^*Et3USuXgZ=MTte=11-jO{1B|Gl+$Ji`XM7M0oM(r;>`O4N7LdN!jn34qsIT zq)WT($K9AWh+%#-X~_6d(7lJgcGmV8VYq7U_rS$~u)vw0Zmjhd7k#hajCy1+A)0>|utdj3{{>wLmnpp+5d zmkn{1{J192_tU(3zcbMajP$tv2HBVF+t*I&Olpd!8O>zk57H!>Bz$gCFup6@Gy1K8 z^29(uLD8HNcXko<)L4Z9VIfz?X(TbnBr6WjBtXij0K;nN!tzU`fR^_ecZ_cv70|BIpMF24dW-Zs2Q+a zcI=3RB?XhpH(ND5k$)F&{nag#XDU#uJZuOua0B**DZuB9RQlt#FudJNvVOZVN5Z!8@N-Ai|Ry~T;;St-Q5~buF z@uRzqf!+Nn#q*rDTBx3IHhzM$BVsQUwtoyJ`rykio5So!an0t@1Y0PUJG$s;`cFD$V0V-41^QTcwA4L?6_3a~hJCL>i6QL=3> zH?C+pt>)Qmb(hvAN=SlEHXIY8OXF12K=H@RUOn6KufheOV+UI{ouk@BBUK0lkB-rhreMMDBrgQaK7`n%gZn_M;Otr|aI*Ar*8tL)-|a$2^l-f$-k= z`G`z_P>qStM&3Hftr)^av{2@!8_oL=Bq3@=BiZ*2lH9g0nQICi>YO~y4E6yk+iC%9pE3_C1 zbn{c_aor6+>v-TdIw)F5={tYMY`@CcW4|%&&gmUMIVAj1^@z2`TVENmnCpO(l~`Kj zNZxG1-+VtlWycRO?bnjMe_3uT5?zkaUYEDE6f9)ehqm;uifnk8Cy7u(!Iq@a$J?AI zVkYA=&ST~F>|JC3e0qNkt9ZP(k0~%EIh1&^cj?u;&EteB0tQ(njABP$j}f!*wR= z>jD~n^w@G+m0*ybj)PcXBH}6Au4g(IFPh-@x?*~p9def{%es~eE>}fhT;-x|KH88h zHv$bbeiN0D(c9XUQaWK|e|wlcY{RPDEt=t{_ndz)ioEMY9A_P!u9$wtdr*W=p6+3z zV(IB=3hm%>a}0@%+Mwl{w=x9CsnS&-J-c7-l9!isodO@IlL~fLFS}#QG$^jm=b=N_ zv5ADGgAZn6oi_Y!k2PNW;j@YG#nZ{)2Q0xtMdwY9TA{0Wp*VR<(jdjI^Ygs&Y^$Rn z7h)tiEVz>PHM>*zS0n1+m`4ztVCds8TRGy#vNGypRr$JEwpZ7kyjkw{J}jVzHoF!ND42d0HY0FrM^{?uL|$P7*K^#{d}=%A z)`u?S`H-x<9!$ev4fCq-Eb=+og%6T#g%ZYQB}PmTg4eHALX0-tHeq(yXXm~*K7gRT_;)S z#OxlLVlKG=ThX%{RiwyN0CE%naN$Rx^&%?WY-QR`#oZNDq?HlVlz>+TfV2%RNgrs zlsI~PuW?SuR)c$*Or#3Zvi@c9Ioy@OdZ*8cTYh%lBdrL*Js{OmX=2QC80h5$MIyR) zmUo1j<8u)C67T4LWxvogCGmwoq1stF 
zbdx@4>T^rhoZfS}yHu=K*veEcJe_9oQU0fqx7!r_{!vjZe6h95Kc*Rv*n4!~V?|k% zRZ$6QA8!lT>5H#Ejm?l5c<#QVT`A43@;;@+RwAMd`f`U^2{ct}sW2}I+^1kvy2UPg zY~|t3XJG|cn8?YUfgOWQ9g7Td8+VlFhLpfSL(u1=15i*O=<}Q(V8u9e3>j=0FeO2$ zN4klW;Y|ZlsLI7PKOimsJz>3w^@XtLv54(nn2@f!eAOxJ;Ot7bN-1EjTbS9_smQdN z#9X?6^Ln~)b}9{Fu|FkS*|WJ)kBn(=>Pk+O;M{tVw5~)XbKvIIQ6qo?kU z;1korcrIoh#3(K#M^p9)MsUZPl5?}8jBh)H6YGWiYrOiHN)D2{1YbPw9v*g`{E)W7 z!E&u9!W~2!5KcCsdi-lCrR0g7ndgK))`3E48gQAV;=?z7wt`$w-6nq zQDsTL`%p=g3TQty)oMq(Y1TQvbe{}I!mEtwHe1T4o2%&L4|P#86^~Y_@XRTxkuP|< zN8>`6x+Din!({_D;U4hz8!lXG9kDfW!c(%#x!$OgjVQ%z+(5 z3ZB|XJ-=x#e`I`7V@z~h;&Aha({!KBV4GUt0Bx%f;uj#KQ$gXP*Ev-aClEMV7*bF} zJ;k4I$+_{N4f-B`11>!vlswD51RpV)nT2^A@{6K@W&ZTo4pG1_4|6>7EcAfY-3RF6 z!?U@jNM;JZgjsvAvG|}+&kl5Kl05yblM7k^A2-BR@NXF-0b?_Jm=nK#2b~5SI&Nty zb(T5)I9n#(O0Vt-1{jjy0R-zLXRhcq&(L++K;ce)b*V6l7bAtjXyf13>pa6-wE6N2 zeW^1>Zabt`07qh5PO?%zfS3>d5(HR;i&LS+IyWs4?!3{=g^9m%SC1Mc93$u6-u|Yo z&xipS)Vk!zwHjT;qS7CarU%b+5;Ee!9GkF(YxFyRAH-Fjy!Jm_w=_+NI(sIwUt0%^ zhh9ITL_}DhVbz|dhdx_P;hr0!kZ!sd5t5b~bkj zs!0M;YgL$NqihGTqGl*Uj{(vZ@|(Dy%pd8BmHzeA%T0fm$b@&2dep$c9*LDoblcTT zhDPS_*eAFi{D@8{N;C*2PD?!zkYhY4(^S-kEK#OFUG(E#pR%`gpf{WjiPXlBLIsEG zCX~Mf9$9m}BbSVxIwNE}>DX?Y1T_y5>T%qf{}X=~j942(^n~Hj{KyYLT1b@AZX(B~ zg@O8qaWz%iwAjF7Y1=9ILp^+(h=yU>%4I{)79K3|!8*VP{cz+iHel}ei7hPl*NH8q zmQv)!E;NSWv?=P&&Xysf57-oG?%EUDFeU?oUITM#lAp4yKN#M|>s$r3)(Muxzfn=E zibcV7CwM+R%})VaBmmwIgd?!xj?tUH4r}p#%y9FnA0UmcE;e19=-MA5yv_eU1vVON zKN{nz8;^Pomnn2pLCV47I{?BDmY(LB-wa1=(lao=jL-1dg=kT80A8f)*FI~|E~Pty zyq3esWyY3HNbCnrcRe_A<4L+jXENZEMvD}-BU@tc@FAYE=4vp7dpAxW*8kfmJ)*R5 z@79(V;CJ_aVER1QM2Zo*NEvM=%+q;;l_Q=2a_ct@nBoN2Zh|2cy&@|^1dB3;Q-R2P zIoWF&1pA`voe^bF*T~FLUIB&Lp;&Hp3|WFZjJSP9eIV7TanV?ac%yw05gAKUkzmsZ ztf-o+cG`h4n^bwZ6pdhiEaL%S{WYn;JQw7)&Ok_Q93#=)5Ra96pal_zxVF>#K6YM0 zBsSaf4s%~Gjt*ECnGBy)>C8SM{IO`=WkHlT2mw`R3hIDq6!TSdMYxwa=36}}rpPn; z0+mVecRGel$@c3^20Mrx`3*d`5s_EX_){cuc>f}@5SBbIROqW0zJg`TjvTh)0q(?M zGMeu5UK^@jaOU%_ z*NOa^)`IqaM4;@nuCiXxd5R_HeZ}F{H@NewXy<2w^k%a+hKVWk-Mg0OJ2HSQGD2@0 
zO&op@1|Lx8JQFkfB2n{gn#MK*2ka^19}cyc)0ZPC=Z&A?D$IkjheXwa{-xjn>2l3A z1B2vnz>IX(iI`re;+c>?h9_hIl`08ewEr{q!wHfP6md_eTJc8f$QSx0S3xaejw3!` zO_kV#bQgK$2;Pu5i}=?WY6Ox`-Gbp$n)8^RD|<#$;EsY(dMR>$1GpO=;vtM(v`(l$ z1NKk)Li}R9tPegFAlYM`uuNxqp{D$n1$ZZ+af5(HB5D!~@jWh5J6#PZ}AW)Dd*{VaH0Ah)A8?ADcJobxN?Q!0eR_98I zHnZj8;M2ajybsoEa_NsAXg?r+)}mO}ZrRNPRXAu(91*z7j6QJmIF8V@)-|S6Ek(Xh z)>Y{HT4e`Q-Xr(90$a`dY0u+VyO^z34*6*Eu(m6B`;6uZG_{B%oQRQS!{w86#=I-Q|~COR6?|yai{NaW8F>z2b?cT95daKD zJ&cxYt5OGivz4S>trMXu&6Q4rxMisg5Jn6u^!-wT&rjMG?ehpAGbYBj5;6l zQngZvIadxqV~gk?vm2$fPn^wBmtMIQb6<(cx-Ag*WERy`NCg< z$G9z6IpVjx^Lt)6wo7$+|6+??<%#GXguxY=H9*6-luyxb;*DZ z#|Y&)wN{FfQg8JmI#Jlwd}(^cQ_Q={$6%%9BmRwjvpd`P+krR1P4lGvVo3B(eN%Ke zHCFg}6>!egiCuao#>NIlgO?S;FA?1Cfj`WaQvtKI)o1&=jX#*j2mhcfB)XInO_ii+ z_M;g;k=*QgccJz_0$nZ~gMps#$?zI2b^x%2@)%_;D#1D$sWe9ukRsk!4pKgtC7GKL zj6Q2*&J1>MWnU?NB^=aV(URC;ppvk-BwnlAzEF*OoG^zU#B4_h8)uD{kp$b5vYw+X zf$}IdYlCZyYmOQ;U%Zu{4^@fpEiOYq)xe=>-kA(0-4xce=jiJiB^sH-in+%Aii;hF z`v|P_*o=AqeV7q=i@JWVV$|nIB&Lqn0fuy!Q&+xX=-It)PXx^i6It4G;+EK+ z6A3>yU%*mGImVN?Cisd?5ODiW$-XdEeYhbGA8d#woxywrv^*P_7m6-Udn9nhv*U%i zL+O+;$!X%oHt~F#)tM*}@Dj@_L`sB1%G92ppK{bOc4eAn7xipaPq@l-mR&USC;88i zAcx)rIW_#FBRFi9SlfT?FK;%>5(suZ14@E`ec~eNT~=1u2AS&PyjG0>(p{9c6vdpW zm&nUaCpmpoD+*1+D}|9|q9xj2wp!4{RpgMMZ`g5rJ@~MG`1rF4&+0zHu3Wque#RMg zv8#0Y3LYxir1%1dMp2_y!$QTmScdDawQb)w`^+Vn{U1X*`EQW^34HwDLHcCdeD)PV z%WD=Zg(RT$ph!RDxz_bM{T6+4<5k}ZPdWV`*Dz<1&A`qIJS&A0j)TlFZm|csG|h5 z6pXP{&S#~7&7qn=?7c6t+s?d^>0~t5miIrv^p(t2QpP_tA95#xuUm)IfW6GqqyC_1 z?u{R@E+Y?3K=t;W`8-sz^eyzyiu8B4$WD?qNQqk!0m+==!xZ3AXp`H-#x=7Rjn>zG z@9g{=LHzHcx#}!qqrEthvv&663LZJgP-48BQ49@*z3a^&vfOepux&%b;xr;D8Y(f- zo0nuWK%^<>m?QXh73B$Vsl{DM6Dbc+!eKM9`UMjAY=07oOA%OH9Njcer5S$SU=qV) zf&z2xo?A=3WNS2by*j1=YAflocv^}-5Q(^zg8D%eM{-tHj0PBow6~vncED;Yt^mAX!_ZnY`ke!3QafWEntCo49-yYSVMCyE)Gzg|mu-70l ze6JqHX&v2tuadiAO|uHFMSTSzZkR4+B=Bs53e$B!=K=^2u1g^aH|q(t1+A?3*@DDm zi9UFgddb1yhVTMqiXY5`n0y^A;cET(dJ|$O8O9*jmRdK?w(SX;8KHMI1+a$4i^PD+>Bo^H_$(z;h)zSG6X%JIs)P4C*bOYZ{F(zqn+F_sOd-%&LL?6T9XG 
zTm0Z^+CxvZM`6s9O8+V<(?It#Ikwya<8R52qZ^yS>N`(e#R}FDZW*0XG3E|>#7FcZ zJ%^N0_ewX?>F*UU-?qvzCU`8GiT)}eX{9>A2#>hQ{c?wT%BhJzLv95R`_YdR!bM@j z#8s~(JCU-ky5ME+5WCc0H+}D5zcQ8F;90eUsCSd=iBaOf5VdT~ULIpuY`h;cPQ1Oo z%9o_afA!$>SkF7pY}Hs)keVhhSFI6Lbooa66^y;WGM11nb`)@;dsfwb{bRVt6kRns zx^XMT8Qd4Pl9?lQ*tg7oe1b}ii;Nbcue{o4>k}z-C4#Mf08#$HP_8t}uTZDIRidPI zf89CB70!;Eget)18PAFvgWW1jkvk0D5_Rcn`&uI85RD{S7K*w#L-ym7bd`U<9@M_g)J-2|4oQ3Hl~oa^E3JNUU` z)no0Agqu_JWZs;J){bwkZjF)~)flN6PdZJDePK4NT}fSk_zR0-f3^WHpDNguYVW@C2V*kR4OfBp6|f;9{v8ggs?G8PHlRUypPsv(S8ndrP8#j ztY9$AMco-T4%dA(|Ds>+c!Tb#sfP#5i&ArfK8+INkb>Y#Xk9x-Q&d$fdzq&j{hYfw zFI#jfP$b8zh2A7Eg()(jQ)4wp38+P}!lRPss66qe=C4c2mvCPDx7=5U5qV>9*lj{< zltrT&An8&UotI}8o87bjDjDHim6ifj+#B>y1N#qoT0b;M^UNN*vcpUu0`NWlcO{bO zCCI0B(v)gTZz8k@?(GE~oymf}*M7W6F%J<#!<=tlD=3_Q|4xU$FTk)LnN3BnS0k3J zxy-D5E1%Pz3Nai`SuIU4h;tq-KKknJv4}ay3<+QdT ztJJYQnXQ=lFOWa@D%?ld@~P|l&VyG=rg@nrz%_yeZ>bR(=wn8Rht7_u#`)!YF=~cf zu*Bg;bnC_&VnU1;UK{aNn5H}y<_pH#0MCcdfDf$c^o7E?WV!%xC#JtGvpfpTpR3{D z0%^>9N}B_Rzg!%r%e)G5uf?n3--C%BGx|^UaS(VZsC`u0o#ryN9`F)&20gV&yT1Nulv*bN$IBy<_eo z`F`s$oc;;@KUlgFUgNrRL4kO+HH8gj!~mGZA>WT>H7R&a=RU~$L>5rMFnx^aBzR^+ zY7t-1&mF!Jrh`N92TMH6^~^;_cK|CI&p?kNyoE~TOrT%jvzpje_pwJ#S4lm|m{#tV z=GRf-af0zQ^JAd6N-cr|2BK9@{*mw-1QLGBA0?%g+p+!myl)*hlYuX)w%d|)dW{qI zpT8teaNsuod0Wi{Y?1>Xc7Jmfp0(?e1C=e@;W$5{22d~3RYWoeALo;1_|+jK>r(HQYQ1q-AC z=i>)InN9x{3k1)Y;*Mc|ja?avlTn%y9;X9y*Y~pc6P#hb=_se(v7EI6o6im^T+4h$fb|coW zx6xb(2*&zinxUbhj({LM5L{;cCjb{#bp)LuUy)4g=-K_9KWJC~wEXnI8syc3D)W7m zJ;PMmxUfUyhYrqa#b6bNiFpUB-1WN1t++o_vfFVhCm{Vj;2$mEDGUrlx;d&dVkQ+s zL2FPiI^{bSrmk+q+|c)ujz%n2Z7NSP&pKA#5>%mg9+TLWbGv}90S8?H>{duw`x+Gl zW6?Oqt-!;)KSJ3wFfaU1ph$51HJVkH;{l(oe+EW~Qp2yBmo%UA(KSSSZPypDtpqw- zd`d(=c7)0QryPeMj{UbB_kirW2kDa6Gfxn=d?Ira8|MAt+au%~u2{^v>?M?jn85!F zj8o(|*zJK00N*AzAbA*=q+PlHKBDT}8&&hv8t-3SgHz@kfNotDen1NXS|5LmAhhkj z0LxySVs}l`=E7NENTQft&yfwYQqWwE|bN{gecq1xnu}{RYbGOs-RSHVmnbqri zYK|u3L~}y1c-FSg-*sQJLd63NgJ^QIfAdJBKS~5WkfRp$GV!&hb=_xo5lx2OKRfS? 
z8fq&YFDejK#3Fd7=n!K`6K@)*NIi(qZiCcd!X=!_FLn*V?^7@+@m$Dj7P94t_#@)Q z;4tR6dJa4}La0G?&OSshF5!NLmN-lH1zcoNRo27!rNKl>))V3_m6}tAgI@6ani!}x zCoy3zVqmDIj4f(tZ+{bGuDEYE zHS7J+ERLhE4OrwhA?3CvetQwtC}B(Y7<^wf^akXKVz-wZKKQ!IfyFf+hvjHF<6rZs z=Ym0q+*do8gR@--%kK3(e!icCdp4of`y&&$ADz1<}b=7Z}d zF)bdoYn!dFzEMlI4hW6BEZX*&=!NcJSr~5dnGMc?9Yy0PB_Ss(%Oyu!!Teyq2ED$b zo8oOc-Mmu0(_?>l!d>HSpy%Y77CW-Phkb*8g|5mCeunu>m{NTKp1#Qn+k{V(mv$HS zRD$*BVQG==$z;f-+h?5=7vhjM2cM0^*(>7LUtD@wwE4pba<^q@ak~vtVMVV>) z#zm5pep0orp|9S>3Q(`Y_{!M$hd(zbkibI0BZtdNU;by5wzAH@cY60bf>F;Sp9$yD zt&$}6Ews1V{4C_OoM)^3{i&nYo(dD2A5(@E8$RBhiMlNMGaZ8+`zGuor3my?CEdKe z8Ds}4(U>0`jec=+_t|ben>&jePA=&?`_Kh8xw+eS65CM5?5O-RXl?As-Qwu`m7GLV zR$15~dJ9i>?hlj~I(0;uS$o+kKfJb6>&t&Yvb*-%9tzpVQWUpl+Ca}JM6dPihxI`% zaU)0j?8xWgQf+P3Z)4X$ zZG1I4=8ej^y?~DImm`p7vc!eWck*dtB*k~2F?6QtyV?-Gr zKj^Uw>UmE1=;je4XnIT^^j#B`>mj~(?L>w%P|Jkve8=A!Xk_N)J9-38Pm{H3ayrBrxQc&AUz-t+4{GVW1tnqTh3i`wz;q6E zSgd*<#)NO)%S?H{r@L@|csid15B4RVRkXms35D%S#%Iut09zDe9RRlQXupBR&@tHu zKD8GCuWcYl@fx??w(4QITz*-&DrX_Y8Lk|BBnbLfQ*#(-YRX|UAC9I1y9CzE>(k_1 zJIe+DIfqqg{W{X7=F0Otg$cXH8&NiEuVF`eZ}iy>&hbB3d`{cFkMN||ya7`Pe2#dc z)^|V|uUZ@77z0dA%ki6lPIM}e-+ZYEG&E&BijB!{!yXIee3z%p%M@3YZs9oBMDC0! zM?dItCB6+FaOhi~OuUI+>sg)geGFHcFxz0j(iK?F;tya3g>T0G*RUn7pM`IM=>X?_ z!gF$>_DBGA&TQ^zGW^G>96ywA@{gXZNN%)@)d!0Zf6*O<{2d#pRsR1%OFvgC{-=P| z_7QdKspLS{C+%q+H6+H5jJGZ^L@{M)GU0q7my=CmoRyZDYiN?>t~A ztnu}KVO;~ER%s4>!QO5>Bs9(&VEDZ+)B5pBi%A>^7e{;;2CAm$%SG4G&(g)B=a-R! 
zN90V@4&cE_*NIL@!xH$ay(lRB2ymvThae7WfXt=Nwdtau;dI7TRWO)>Wq>NP;s*b* z)b;I(-a~ciQWyScgrNtDgfBe=Zih|_jk{po5jDIzOwD}Ys!Odsr#^|p+Xf04Ws$S# zn~N|3JGS#2(TV>VW9eYTt)oN|^`0iDj~@C$>}1uye?frCL$Ib6<<9C~E!vPp`e8__BnuSvX6h`N%c(>?R0tCT5@hp_FAUbU1(b67%QxNDkKu*#VkR9EM6Y zA+nk0JKzzFb<6ZMY=Uq#i(V& z-Vq}fj&uow?3gp8hnr-K+Qr>$8?#H8lcxs|By86m+S3S6p^F|HtYIt7dsL9h@cTpd45-#@<(0&f8T^G!hvZQyT<4#;UoSMe`M% z@P_n2TtFb-MzEYjJcubCBOg8s37F##_h7Pk1JDkxkN9G6dF_Eqr`y_mVge|9xgvz# zgxp?OE@zpDG(v&u9sLx=f%r}-ubN<#_pB?%L{4CFbRL)R0pr|@jmZX*n+Hz#dKCiV zS?+hddd@$IXG}f(aGeIQ;I;H6;2cLQtKya}saXBEKa(jp(66#J?i$Ldn)f7Mc26_~ z%8pe|R9mxB?W9B0i-143<$fI0Zu?@=;a@el8lErj=rR^^?hdqtb=Ga(XtxLFD+^Q? zuUUi{c;x6&OBEs|beB$b4U#d9#;OB)A5QjxYYe{?UowywnbshLqLc9Qu$|FU0WiJX zQqb`Km2{SUmhDh7st-q}&*s4e9OE1pzFkyD!0nG}oYJjK;6pM#0r-&Yo^4%QHXKQY z9g4YXcvckAjUJ&bSzEO2eP=8g;w;vtF3er1Cxw4?i64Iub9=GW3ZBZWU%tR#Efw~# z?Jn;lZfj<6Y+GSJRLn3Twcns;oTF@ZoR&&wWqA>&BmXX)?N>Z`kf81zS<}ZYOq>s1 zU@)R3T{e$4z^wbZ^wo1L2rgc;I}#!}lrJuyS`zp$Mr*wPedOCMKsvf$kw|q^#RPWC zXU|&6D6gbG{Ia-pgoqR{I)x7bm0&7Et!lg=)H@qayBdB`hEp1zFcOE@1grb99x5;1 zPgj=ZBY+Qdp~Pk}vsArcT{7cCwWO^d8`Js^*=&)Zjw@Q^d_JS$r>9#Nb{N`!Y&^?M zm$>#^&~Y)fR{Mv{&nZ*LX9u0U#mfXX`8;PAW^Ge?WyN=>sq$J!P&-1PZ7cU1%=$b3$j_og9NPlTj%@0ZY?w1OP3gnz;>{^D%Yyn5%)Ooob#W} zDbp&jnD-lG0xtIZMi?z}wE=6W$wRzWVc))uvmtdEq;MOA)b)p#j>-@e2)qkEw+O6f_r%hJI!c|`$HS|+dh;u(U(x< zZZtsNH9n>&FKmY$U)T-{yln{-Q$K?K>dK5~{77{N<}=pxSzjH4$0AzNZ68;FY?$BD z4kZ%R$b5KzUOjY2OyP7QwDnBBWQZmTk6h!J%`$Re zeXL+VeM#xy0LF)9Ee`NKMA&`0VFPD)Ae+slrA+mF@)~EfsBm1oty@5bA;4#_*~B?e ze@9f8Xr=BtFyES_!zBHBK?GU_*0|aBuc%S;EPbs|@oPnO_(oBB%&*c7fq#C3(Tu_R z3)6t;fz*PJi&s=w2J&0%!W_CR+7w2|r)XDe@f1jQcJ4BhGDE(Fi~GC<*AWo{R_FQ5lFvc5uJpaj{fbL(23GolzFN^7yot z;0kM-d=2j}r$3uo-axj3DeUrSnqjq^sf(oCkGh3=#C3Q8d%0G z!~2i*t^1TG9NX-ZTUvxdoT%L}AbulSzCL}$07oC4N%&tdhojYj-xBYD&b3Z3V-C?KiPJ;XY_)e2uy$n>J&87{P zzG#f_SC8)#Mi@^|!OSoINx6RzxUB%-fZovz<|xer%39oW%@O*S*(@|Pa?Y+do9#M$ zeU(*y(hHK8zGNuXSb$Bt|AD&&t00|heTQ{QL9d<|odc7yD(j@PV_V;0<4nF>|H#*U 
zA-3tG0}pZc8ClTlfWCENO=-kf3?RMWj~J=?*6{C)&&c1#=d-Wwsm|nC|JNO)a)%Le7;XCk%4HBn z1GWRM=h%Yl$!$#QCGNj-Tc|63#JTH4%f#5PMUQ>=5#S!^3{%QAlO(#Zv38(sHDBT8 zR0oynP?(pERiiP4Yc`lpJ#huuuWT@J>LcrVH4G@!DmAJ zO2D^G7Jm}5#u)JoQ$4IN*j)dgDK6-Z?y_Wn;`HB3DK6pPP@Loz<~5rasbVUjM6;E? z5bn!#W2OCCm~AQFs;oTA175`bsg`j9-2N=umdCaa6@EAs$e}3!ufX86qCRxUoucc) zZcbvqMJUacuJFUV2r?7E5_$;(G%3W^cpw8e(bKopV7MKF&4nk|ke-P5r21C5`FyqR z_dg)^L$~E%wrADkchHbc|<;j*hjKj8o z=R-og+e$DRJ^+zms??^y*Pmgwpy6(UiZNhOslF#GZ5^<~df(gjqW#Q86?@}@Tir#=&e|>CZBALlTc!+O}SBXa)raz#lKy6f^=!!6$L0P<--mg@gU) zg&i`B=pMMoF_=eJ!wjUOPqb2{n|4AH}o-Q?iOax1cP>T5n+Nmn$4Ogu45AO>9IEC9T$p}SM zYi@*q>fdc;Z;8{QzS0T>mOe)BZ90zF{nzSz-?DSt*JU&zHQu)0#rXI5Yk#Z2ez$uO z7v5VtR;KnJbTrMMIvP8-j+R2%tT{}~w!h7I#J%~?1}H(e-FwDEoN`(e+^5iM&a?Sk z8aI2+UJ-zoC47TV5NVRncZaxD_~4-AER$R#int*D3_KTzrd0PiXTzelg7Ei71~P(v zBTJ^4aT?frVmV$;a*vZJgEAJyas0?Oq7CMvIBpnq?pyUkDg}KlIWOsSWY!=E$Ej5O zzymmZ+|SJ(#%(59IJqB`boE& zsZZcp&xG5%o%o62gCwHoir8?*#bRT%D?1-!(^8Sf>p(sq#yZ&BEyX4|^I;rb%wR+x zx17mpvh3WGcb6c{inVsV+19A9(8_lN7j1FE>?^yCZ993cL`I8rt83s> zsl(!X<=J~Lz}rB%29s4^CqY7N!zPo(chruKshBSZvUNtD7nCNh#{7fdl&`ifUPdj5 zzi|0K8lA^7;t+>d_E~cvGLjs3{Owo@R0kx>m9Ewof@*)cdjoNe3ofS)C3lxHSWQ=HHx2_HewPO3mG(xui_iD8lCcpo)Rq?T}1CEgtq%dxruTy z5A%ngT>#mv&pTZd|6^&rQFPhv`x3=zI`KX4;f@-eG5Vrzo!AgyCTJNof&<*N3^XDq z*uigR87yin!p|G=ILl<1#()42N#HS&Kfm{<5fulCD~m%`7ipi%wk%rgrQA804Zz9* zwsG&N znP1d@YisAbz1M7gNW400d^2S(>^$fo97-5GZAHkWd!|;H6oAi=foAhg#E$k<&9Vk- zm%QrU!QZ?F3Avv=k1$a$(77*}ega=U2br+~2U}0sMA;lY2RNG-7cQ0mOJz+BP}aV{ zl87t5Lg^1075jwdjR&N|_s$vmGi6eV?MgHv<`0bUU82FrsNMHp&6DyLftT^+Z^3&y z2mi{oT}teo!{J7AUdBIA`Luh4JjIW>v`BB9J#XX@vNU0TW6_EdT*U~KIdZW zimoSPV9->l$`xHwu%V&o8A*X^xUkE`kKmT1~J-$oq?BN@Dc(-PQD*031lyW^r6ZWi)=<~+S?PM%! 
zxC@hux{N+=;pejp%fw@ouXHzX`W$5g0tHo^y}p+Efdd2Ft)aI?N)Ha;eFTf8UL8C} z!ULLh1(UH;axqTlrZmDc>DNB}Lbb2*9mc!SiLQT#HMcK6JW}5bO%S_j=1ePdBj8!cwa{h_F=9a+~aTu-WUel3o0#TdNYKX zw=d$Eq_OQ^v^O$qv)^y&ZH^>Tr(cbNN`@5MF~G20J7mP={|T^-CHy<_ZG5};p5MR* zT16r+{CnqvGkQlx!l%4`S3(p6COqD=19ve91>+-BM4pAwv- zufEX-X)h$>I<78O*eLc8tP3fIcN>2ri`ok@dXnar2tT_reEwo!_IB`~*~bsw5Bffe zM`=~bN^m1IU6i@06!V&!E(n_oKT-jmKVCAiv-_0L{57l-9I9vy`MqL9ikbm&R+>_&AOJ`~E!;!gL-*8Ks#82Pw z1d!*^wFCxI27V8uz>4Kh7uaP*xYCY)Y7Q@$Qc?X_?ufjG%N1e2c3R}33}o}cQVmYH zx@%IUZ}@C9sy+vg?qwTyFZ;7~TV5&V7sJWT?z;sG)!$2z=h=(22G9S$&#txfA^)4~ z8le0OPP5K%+UvG^`x5Zv}_NtSfjCXPwd4vWHlCXYxk z$gxb-mY{31@iI`ywzMD_NcDX4SmU&8rJ26Ir>ime18G-s#8Q|)wd-bYAR_Z5yvuni z$Fbc~`Md6kiqe7OnV{$UuheRXtr>Vc(}+YQuVfcx5;OJlfhdtFH}z!l?25|AyCzzTJfii z%AZ}94~Jkzgsu|y`T+mN*;JHIb}ybD{;~z|P)9yLnGKJP4&u^Y)l83_nd=QCgZwRO zPSRT5U_uE2bCkxY?bU?j*)EY_>@XyYxWquh7VjM`EtQ;1?SxI?8blNw>UUm!q>lP= z^w;y<5LLV7R`yfRXD0%uX;l9#z(ZvaMN$uy#%`RCw0Rv0R9q_I? zePZ_#+Muj3fL1G2@$gaxjN{%Dd&Lss#Q7)^=b`5wjB15mEa9`iZ78x+d#(DG^42Gq zR3%9rmoC#YzP5Ib+k+-1SY190J^gqlOZGNdF9GE!R$r~4I47}0-m-${;`K%|cMz27 z9oA0u^_X3{k~$|FAUol?2GOa~ zGz@y~HKP5ogi8Bx0wqgVFX5E_7Vz|@wu-K)_0*~K$~-SUf7@hyX^e)?wabHml6hAd z3Q+Ks`Us372Ol&TGxdlUOo#0ikYDg4yo8GFknr6N8`%mO+E=2wObIUIQ`V?!_D{v< z0wG}ftIz(60R-I7|HKB#wGOm5^&3BpIRKna=X5fLNY;}XS_Zxqb!SwM*nQDx!udb9 z0fU1>ePdikLW^9E@xq-_c7!g?#0HwRFMTSBqdV1VCJFa7h30}45GLO|o89u=TfJly zE6RIKI;oDXSEDIUwV4p6dY|^6rPg8;6J=u#NIsSlv_O*4!s4-ClqpWmI>xI z=pk+mo+raOl~)vT4F*Tls%TB!Dt8s$21uIst58GOakJNp8}7c`zsc{DH&wlP^DWwV zUvme)JS<$eWh}mU?%u&^6;Hg(XxEMI>&;-Ha}isIc?AQgcH$Ok&MogK0khOpoC}=o zwNPKNJkOuIaUfB!rOCAr?rnG6II$uybm-A~OeivL_S%ctn|j6L++Dolp>3LLLxsSt z#fnd+JL>1(2j9bS@nw4f_A>(5?^SmH8^v=5-G$Oaw~aiQWp@aUOC z_3z)w^Dmd3#;vaOI7P2lz^Ug% zv`Qm%7^{REzQ-kS`JJL0#Bu~lo$N76ytv@5&a`J3pMg^A0D*w=an7G^92vArLKBt< zgZj?19Wsz59^M1r4}XD-hsy;7S$xP*1HT8uL6O@%L&BtLU9_Sm#-y4-uuvt0CwPke z=|_OIIXT^1uf)vH2fqM=b4lDR{%cFU^&VAMPUfbI6uw(DCRN(5wM-uARjhUlQ5;Zt z_gH4V69t(B@fByXOH#Je8%fGEmAcd}m2Qu$&kp_am-|U4;rrFlBJSpckCzusj4iE~ 
zZ@jyKA?ph&`=i_0xOb=U7@*blK|}-dPUt@cce5$U{@(8KDP zFJ*O;nTCq!Kgwx%h6#aMYY#Lp^^d z<4b~Fe)9{`_jABqPE<$`+l-Oy3|z04%X;Y1CD4~)J1L~_)QvyX2M8mpKa%RzCrpD- zx8`=jLOJ8O$-7`$n|&ZBKC-$#00bB?DIzFxsmmLvL6_WzH6kcF*pSwfyXcWQJt>$9_TwqY1ip zj7Fwp0FG-|GRDlpuo^~Q2fSsd)J3vLAPc&@lr24BpmEco)qF7KPD=79e2&;(fr*iXa;3yK63hzTo(g!bf`JHlTZ6${ zw5#5B9&lkmv~3lPz%#E)STjYG`w!v^W|8licsi__U;sSZB&t5H*MGjno{g^_IeD=J zMqd0BcEMo&jT#X;B2GI99?&jZofb6PEZ5Ob(3!tCHROH3IOjXIpkRKHE*vr zocSibFTj)DBdaqLBsCnv$RhYub^?PFtPZ`y;Lgx_r1{|G#9FdUWYOKR@w1o+!No&O zkZj_!AF5}FG%u!WlxZ5zKyc53ZV`Vc&*RuNW9LKw z?(ws!eWFk&(NOgHOL-#RmR%w<#U;`1t*jg2L^K@p%*s)8(57B)a>BeAkLOU>j zov)^Q5e4o(nz$+Ck5CyId5*26*@-@9kpk0u7LpJ!FZB8mpvfY_8Aa^<;|AQV`ojf) zeg`IbP>y4=tc%_Id)evYtTs%gq>v9)a%tHRL>_2>?7@)ZQ^;fBZUpF)T)ZD+h|~h! zcB@C^w&#ztC{w!p9Dbp=U^vDjuABwM0q$%0MkXfme3yI#CYEIfvz0XVr)$l>uK?N$ zRvtUL_C-9%wI=pkSV*&`4At+o3HY1-;QQ_<9d+H{l&d;x?hpD*rZ09L6+MmvM;A z>HK)nmkNyV@xg<{b&NWT=3t{%zMoIxL)6n3{|D^mEJ*jN!p9?wZ{qjs*-qP6)`_=* zn->qQtw&TaL0kPv+s7j-x0czz@3+J@{Q|er*c(FcTp_Fvn8a~qm*}f<-+Wn~1K%mJ z6xk)L_8ye~bX{MG<|&UKK+owTK~jG4Zy6pAo-%dzajK^bXvhp8fjfbS6rm@JeAw4Z zmCw>$d(40Y+)J{)>ODqFM0%WaJ}WT0z|Fg}S}d1GkcZr{ZUXN}!T$8u+K))UpfZ5XWHjkx3H2>08nQynU(v%yy_HuF_nxab7m;q5bNMHx9wI z!s>^oPz%h8Mk2d{iTl_AuUzA`)dPiy@%>EZ<(Q5$^c-7BOV9Ne{6+mHXXyc9JB{w3 z{c@P@yq=ho zY?6F^*7%^bvJln@0n%`zC*^o&AC>@>1^q03UQtcV+X8d^sz5=JpLZF2Ck5B|Mr@HS1bgM|dmW{l{0ONZrtuy(b=Hu?B{F2s@bL=8u@Pr_K6X zmA8QVuI8*dB9R|>7JFTdcRqt^3i(r?d7X);zzyTCxYV8p*Cd)sb{Ji;SCaBuf9|@! 
z^IOw_wMR)RRO*{B6H}jCdMf~MN48%>MXyjXK0N)F0TBQKo*hN?Y3dN$m!|M(Nq;GZ zdhaH)g8!at?&5Q#YeU+evqhmG>Z<1Oxj+D11^{r$WLdaG+pEtj6gtSTE?CgasUY#I zUcNG~7Iu>mD)EysllbV_E$?bWAY*?K9HvlVE4Jc0D`7Q`qoM1MpqJkx*meNTh+Yc$B*>L#duRKPH?ror30X7As}*B4|qdHWP5?-$mT8gsauJ(BRSb$kKyI_Az` z2Xm_q$A1(IehNEpEx0{vvFWMKMQlB`#Wq8gMQSks69j??c+D2-v*?P9Ifdw<+A{-b zx%5k05>v6?um$qfYy3m{xXhD+xs8^9|9Vf)l%sK#hcDd=+{Y61ZDr9U1k}3O9%NXM z0RDpI$8!y0-6Kmyl#QiB&};d3@B5n7*`(4BL?;*r!pAcf`5sAOY$Y5}f*TV%BS}PE zgcRqSuc5yH^#te}{>zCO8VUSe1DRkCv(|;0HgraUHX#f57ZUp4U;G{qXnF1E2{HlC z+`A5w_%krqgNsg_ZHNF02!%q>6yRoK|Fm1!o)141I%ji_L$?H?;Giwxhf` zY72(IKSmJqSz`re@mf*N4~7$lmsXz-$*Cv!rYiMp=FOmGaM$90c#FPO=`)9Lf9JHC z+2~+*y+S&z0EQNqPu|jEB>Y#PSNt!aXEGZNSJNw`(pqpB(N0Kl6Y?zgK-Wjq#HWIA4j2=Lv=+3I1Z z7SzdMukr$RUOm3E_AltuiZX`2z^-(KQ*nMRa1@`$YKPS*3Praii>~|zLoM6T?aei; ld8#@;8ayOU$mH_D8)9BB`jb_hAp`udwK{6~#@zGj{|AOTZ?*sc literal 0 HcmV?d00001 diff --git a/priv/.BUILD_images/rebar3_shell_start.png b/priv/.BUILD_images/rebar3_shell_start.png new file mode 100644 index 0000000000000000000000000000000000000000..ee8e1380bc5343303d1acd92dda3e975f2dee775 GIT binary patch literal 88661 zcmbTd1z1$=*Y=GdDlJliG)kuuQZsZ&BZ4%F3J4N{(u|apNJ$97&>wWP5U2r#4S0ea2$PCASxM-)Sqewtd9z%wHM2!DT;;LclPC!7} ze)jJ|w@cv@0s>r~ri!AGm({N(9eWn_dK~WV-glQpTg+>z3i?TnYOCuW>hG@7Yzsct z`ji4zB)NDo2zL+o&R>~6b+3)VJ1dc1a+1uh@rpYO;XIlli0Fd)M~hFvPluM?;@HJ` zO~`%r$c~f@x1FC1Ea~3fgJgW(*xmrlSxyw3Hflnt#7CuHeJ!A|Fj^XOGTF zZ#AcRJ%_rG0g2U-Axl>U8eExfcebi@to>|~D%{DjMMeE^U&;aZy*ww)oYCk)}K0_E%g^b-m_aftge`)fPVnwbOX>m|?) z&;B(Yqxx-~Bs%z_xoa$yo(%-E?}keBVA#uHTLfh#MwPP+qT}9wk%B8Ww@s0PcJ?&`^L}mW$F3m74Q?! 
zopk6tO@fM;8v?YflL^5FW?+Bu9O&(Qj$!3N#i(*8vf?KYDywD}z%l~attJAZYacwj z^L|l?U#@k(eaAO=9tCqmZ3C-tcEC|jkhl=(djlw>S@AL$SNP?+)3!vTB2JX+NjXc0 zh=iYxJ)J-Qz&ijx8wLr5j+0VQLqI^?JYV3+?UU^q;MKVk2OZXs5roz^KV)2nB?BA@ z{MJWhj=oc;zGNc;;ah*%4s&;j#9*T{uzM2nn9x&Mi#EL|2xpz&Z4O)Rwr2?xlMi;V3 zuWb5@zW3jTD#B7yGSw1ASvkK0BWsTwaeAO!qC;zf?yVSUkga` z#3X{(h5Y%-{vp&MMu@a=$mzvi6r0Md%3VDqS>A$IRKOMP6fqiohR6GA_;pBq ztjMV$456Qg9b-QXq7f!F?+WfEcf(DrHy@yyZ69ANC@L4V z$?I_2z%E1KvKj0@XYYIKK>dXSKyP`Zp<@>W?yg+CP4cac?OXGSGhCg2?nHB-<7aNe zW9|BR|AcyPLN*%U`|gZbRqOSd$MHL*cOgmUR(`OM0n7G;yG6_=#OAU$UCIkdtOc$= z7u0k_3&+-i-AIonKl5UB=x=C>EJA?Q``M z5NffAgQXo9YZ$Z!w9}Ivwh7EZ5l{M|&aGEW(eS%0JzKoaKc zr59upi$GtU%lX8rqCbTC$AQ_Q-Q9wWUK_Wu{ z{bmH`DAk`4-9>Z=E_YpRU{TTrcy*>y$yC)9v_bZDolOaNqFOrY^W51KYErCfFF z()y9a&wecs9Pp|z2cGc+9`6-yH(f_sePTY=z30m!b3e;9<1L-51vZk)7u^oK&xa%= z%VC2<45{3{QQybj&e}+m)#*9U1zFuXfgId|{?^6C4DvLt-nR-s^g#obZumS>xG8(+ z5=3Vqrs&(1!2Glg=d@ev$H1nsuGiIh5jjJ<8O227xu@dtNMB03P@R=`UEq2n$)k?% zb5lwSV78VaNRuKY^2=aB!)63z4GTF8EErrYHdpRR5P<;e30>lb%pnsd1sk4WW7eC( zRKf37teN~?J+J7D5W*fE^q%Z}hP(&+eX0?=h)9l%^F_%U;Cz(WK>IoXizToH0GbK9 zhPW~d%A`r07P{9v)d!9)1X%(;VP-~M`q}<@WtgA8r)ahez-$HMih%AVHk zU8th*^L>|M)bOzM&n?9Ne6`RVGOeJC21{|70Gjjdz&5YGxw`^IY0m$mFsxeNriVxr z_AR|n`;*QrZW_vJ@b}duAW+zhGuLQF`m2I25->~rqfE|r-Xif0W9b z>dU0$JQ-tlpcD1_<^mvhcEBVqOb;H4&A;QSV_j^2d~*J`_Tnr#Enejq)Il)ZaY%#&}eYxNH6!Iw3#9 zw@My43IgoxOi*qqiCFrmcWkBidzgWUXqR&on_uY7=VT4IhxKPKE!;=hz?tP4uZ(^y z@_UJSn0iY_q6BiKy)R7Q^1e~xPJp(1l>%%cYekiNA zxTq=cJ>#$|ktO?;nA;JECcULs^U3b}X*v)0P>ek4mjX!WifQ<`HhGiKRVsM>aauTT zeWU5PRu>`d_r8=d#vZ*Tu-gl_Rl;&1*RKV%T(n&0nD%;58cGOEI{64s=nHlA8+T#p z&pu!y5kbr$eh`i<*w&Af0PK`=9i!H_R~S6txmf-VWuLM;O71xskG~`fM-Pte4sJ0q zb|g&xeG3SxGV+}{79X5!EL^gT_~^G9%9CrtTQ(fqcdbO@?i?-@rK_D9)_HU6bl_+` z#3Q*3@%U-*>Y$_4o{Gt^fIus$@2lALRpZRU-L^u?_vMIMyKZpi>MN>P8h>f&ugm&f=^uwriEuul-t77s@2GZ3_e}^kV6MpKwh+d!ZxoVyP>ME3|Ks-}8_km1evi1NSFpnhj;R z^1rY;BgDqk=NKF!C7(22AC$WP@%B7TUp z`$T^&eR%@+W>H(~cAgk2-PmT*p-3?g{jt7U9Mw#C>oY;0&LqL1W-(O~)IU|O^n=CV 
z%HiMaLLj6jAj-Y|R%-7y8%{8+ahv)ck+>k%sQNh_bCJqm%P!COW6p+FnjUqZpf5bV zn_U;>Sn?FE%l!O&N5TiT;q|n;l2@N*W%0`%U%J@JfOj?>cO8stmm4x)Sy9MB9^W>Z zvS#zWS%#r3uj{PLQkZ{)|$|T){Ca{Ke?a(9D2fg9onYy zf`{){?C31QTNkWZOl*5P@DTkd;j0E2B{YbW=RMrFTwVaiRO8=837#p}pqkoPvx)lQ zE377{!BoqlEP&P~W6e`x%&HVdCja~BU(os1vLJL3Ho)=f;iD`Xo(zkxp@S;nIXk&^ z6fG())Klo(y9qe$UHWhb4hH4dt4QWLw}hBp#0L2TpRXH!KDv5GI%J7q9`b{P?Tmmb z!xJh~rPXbqjM3gu&+aa* zKVwbO2jMZN6^x(#dOaA(O0wg5bbbrBwDu|YXlXQFMdvKHMldq6`D?phUc7TV+njcs zK8TpVyLO+EYU9mF3kV^^WDqnX)|ciOcq??DA9vCJI*Du=NLCn8^yQ*6LP6U);ef#6ph5P z+NJuEb;YQ>_8x>c1IkR{00wcZgd|8({!F9ya8Qf;$P`LwW*gfC;-8qQYLLs-)2`LCj*iB@!rMY zF^u)_(TdjRvsg$#z#U34t?Cb{yrWZzX+lMThJ4+orY*2T1zGFEg*v!+Lbwz+- zP3SdP*(A7HmCtZPFL({dB(&+Gt#tddEp^rvT>3{50s?)xf%`Tq)mKvT8xlnFu08Ta zhz+oOXO2j;Cm{5JkIH7FA?_b_H(e;XL|*UEPz0zfqud)tw)gtAc=BER#y4-A)Tt|{ ztz9z-`ERFOdBadQIFb;^&H}bGVBb6Oa7bDouflSY3s8M!{(k>Kv-vxqLtq~x{mAc> zdNtph^xd|(N6k&9t&3Z)B|B7{Ju==N@7zCB0MU&!rE#J42Sww6|5F3djjJzX4CbfMJj+P|=I9A__7fe%^FW?r*X!_|m_8HvRrY>v z@a)6UWNW}PruGU=XxmCW=r!`E#yv1YjexAm=HrD;T>jS!)5@qh?va(E-Gzj98}E-V zuk?$g0?d&`QrNvc_v}zum|HR1VILHRVO(O+glL^ypz+8GvtNGj$=3`!!3UJ~ChbZr#IxbG3q6_lj?o^ zof5y@QM!$y8Tu#dLV7~NkLgF|dPUYpH8Bsu!wzihLRQt9LW$(Dvvn-@flEDW(EdB- zI3o)z5uU+}jSJoU0xP9-9!{JJG*G+uH1(HRPM)Q6NZqoWx>f<}>9i-~`|X>0VCHM- zD`P1aE{qKK+GHA-*6G*{`C=oSo$GX4M%(Rbm%WB_W>4y7DzCp>8(bru+n=lHZWFj* z;8{d|TShbu#K13bN5!@eq7At|Uf*R+2{4hg*X@r1pU_>sRh)L~eereC592kjkzN5) z7_os@)D$;}=+pdO+G<);pT!OVjS<9JMJQhhdV0lAU*{$%+wT( zx=)BOaLaiK>@=4emk$onycpbg-C>Lq+){pfQqU=qNT19$i827A=jL6yTzs9vodKkm ze*QQh8*QVCd)x+2YOaTw)N6y&_Tmq zj}lN6mp)Z46(1Uco96CiFp*tSakz(Hp)B?fS%+MbTb9ocQgmFu9MA3dZF`yMODpak zm5md2F6COVp2+*3OCpzeWuvm)n`mTR$wV)K<9Z*VP9Iq59C`)#{MH5-I?gmdgsm2S znn`+rTF9}R$<}G;UsIzdXE4bM96C{4;eXxh4ycjvK2Vfh3w34QM`Z{&b}-~WsY)aB#+dVR;w}(mCJd9lBdK4MK(8)Kysn$$vJX{yT5Q~Ez;M~T zYLF!aTezNAIZt{`>c^@1!Mn@XOW)eBKX?!(t7LcQRXoX5n_c5zzwdV;*6bYU)wzh$ ztQgKY)ytTkT@i^B9|GHb>x&M1lSC?Gskh!A4lvf&v9b1?%Q{8k{ycv*XtF|( zFie2YFS;%UbTm;nEu?U`3bsY*9z(OF&2l5JEHBqiY#N7wlk58-nQstteb`{0@ID`- 
z2K9dWvxwhBYY>nFFaBwDjgR4MevTLl1?i{+!DX|bdN!eI7tWrpbd@n7(J4YDhfG!F zjNAoR#jl^=7dGX8xf(}Gny_}DQX57~+cuPtz&1j>l*sW~#D3I;0w?XyZ9+%B5A3Ip zVJMY601=Mtk-J@fkuv$2r;dmeS$bJ|eRpM<9yF6->VV}nQOjOUe~s3zyYy6nWJ3l7fc=VSiB9KFNlSg`ql{F;zIhY1KER+pMO-f|zMj1fR`#&H{{mB-A zv@cSA;>2bA=Ou&h?t}6t7Q?Q5+i`vpiR1VMd>E0{4EjPz9n*7y4|Vl?*=U4hLM(JW zDdt_@y5Kso)L7fd4Uo(s8~%ad1Drh&a}J-0FQxG&NrjC+(yU|J4)aKtILRm~mZP$b z+YX@&OwGDCd1{ojxchwblKfP?o=1g=$Dndm)gj%B@h^QpA|(7W1V0LBb7J$~%58vl zQe($bQPyo?&r3p&KD;eM4Nfw$AC!Wu1f(H!LFtR>Ro#P0kwbt^BLYL55mh^e!iNXAyngW;-VlE1NTu$z zdGI8JyAH%Jb2eQVhk1^S>|n!5jXKuvv`@uEp%$ZIp~&E?fDlbfOx`TcAQUE`KNaE; z%`dC$5jY^*ATl~U$XO!mIh?ILS>UME`KBg*x%App8#wcZ?U<3Nfip&{2L5ZR5hNUT z0PG9ncfgqw8AaOynI7n1h24Dl$<|s2X9{E)4Pb1-Jb`vP+nF2VzZr*K@}%O+U13 z66fKmYKGqLES-RLTLBUILwQ(Yc>jEJF3mXA;X09dwB~c=F&>npL@ou?HitARi=-{ z(fh!a4(JWXOOYY#r!h8f>3GTjBZfN)mQVs+PMxE0 zBXHo7^smFq%+Jm-M!NF8peR8h10cizx5;(<&~8e(FL>&f3)vX8p_Qm=PidBR6*BPg z7AvHY-9I{S3^ud4=i+thIkkxo2L!`bErdi@ru~R@G6i|QyUU;Gi;^>02BHb>eipCy zSI4<|86@#XaCKz-GUrh6J*&(p@DLJTB6vZ0>H1$m`$p*IKhk!cAhYG44+=9x#BXUD zyC-4yL=GFUz>CNJQkN(Hp(X_Bn%%sjCs&1Eb-&uZIY$G&2t7=npt>t_;@P@?Kl$1{ zfwB#L@FPeo%g*^BQ&#yzkG{fcv=?Yfd8|=GYnZ0#EX(ygt)1}Nrr-Kf!LFq*>Nf;= zx+yj=LYP>|OT8jdvT+mp3zm?`ux1Y*x7{)c0f`blz47;`*TwY$NBpC@Dn_n3gD*fiLgy#IEuqeGh&HV$H5rTrK)|;$tp%a<9S*Cn2zl z>a7;KDIK7l9UAz_FKnzhAWf{)5XPWmeKIEq#5VDD7os<-V`Je_JzNKK=fq>F=wIT& zUEcUjLHwk)+3_OP=c+28PGxhOatKgVXR~3#kMtad+*J{OMb>+=j@R#aVV2VS%4&sF zhM1no&@`Klq~H{y4?Vm}5v7@boHmSs$sw(NX4dUb>&T7J9rWERYqMgxSzkxT%ik#a zIhYHTX2vg21OE~L8pk0vr0Y!)OSqKPdLxTU?eMXsWNtQo%lx6CGqLp9k2>L0RxS$& zit3sW`IXxjlgN-C8Yq03`qo8(~@P6n&g?)F@f|5+l}nBn2oz0FYUMwuq_ZPMYb!n*OrMIgRqH}#I1-yP712| zEUo#Ql5dO3lbe+m4|+Mo_WV*ELj9iqBEw-G;fCNJQ-0|ldhPAxA&RKYKy=>-G0 zC=q=B!i)+D#d^W)+`iJTwm1E)4^pQgdJo*=p%QuzTuyc1E$7$V%e0okmShKV9)Kt5F9Hj&ci)Qms${$KICE}h_EFiGQPSV6KN$g zLs#y~n=aNzmdsbO+nWx_Xj3s4w-4g!*$u(a_I{Mts{T)IptZ|!lJyqdtcHW_2yit9UY z$EyZS!p7)sZ3fk)D}PU@{wd$gW>@+Wo_o}rPm+2v$w^z1r6aGZLp9xR1V+q1E<;A_ 
zCnY>y6uLI1B>eW*_kHP?%k1DLM}8NT+U1;M`{#Dt?euI>=oLoVC=VVFjdk(|jdysz z)WS-m_c9zDU=@v()YVD6mia$3%2cni_0`S0OBm|Pt6F+EUg;M3h3h=pN;$ZNjA0+5 z;rPgKJ`J=3vK{xe`^F^_87bd>Z#gNf_qqM#!nJZvee;|2h0M!?cJ75lYDBniHL*9T zXUh!cEyx7QhjgbM8$&%IX0|W)lxb9vu+qyUt%)W`+H59_odJ4 z?&~p`w|dM)yv(B=KDHGf-cjjl{j^~Ja_J-)^-uJu7at-dx=G_1oiA5=yPxr&DtpT! zG5z%yglx|IHQ2^14LtpWHPZf zXBTlSRBwJQeRNd9?1%M4OiIrgBFFlFW?e7MX7b=-KBF0T$i-JPB6zGBH~xcWEIv?* zI_=BGO0uu8<}{Wg^}2*G)hAlF5_dAHnHGEbnblCIIfw@_(wgPHJW@#qu;iGJU1d(A zHKKJZV9Nl>_|dh_tvYq$4NS);ae2tK=kOVHyta3?3y zBW;NG+KCt+*bV|qz894Xy@h^5Qa`L*wZ}gwFNE9p0RAKwx||bmBOy$P#JWkU+<)Qa zH&hYZ>)nRwyB*z7M1YPN_TCrROnbupyYCK;(<&%fkFhyFKz=hnHxP+RpZ#*+WRQjw z6!_nAfmO`TYv=9{5U->f`tVC^W6{nSReDN%?sPHx9=>}y-@(6T63A!poI+TjzDj$f&RsViuXo)h8~hi)UcXKcr&RjFSk_p#>$S0O#KDsC%_ z=cD@%q7+LU(Xa)3d^Ms9eG*}P!Jwkvbg4E#G5Owxd4r!E@D~=JZo}V3Ux>ml7Rz%> zuON+#gXnVYk?ud+J{SkQKvc#h0YscVeaB^+piVKmyhDs$<^E~>@+O#Y@W-7eZg<<3>Bc zi_HV_`VZ@an(~f`$B$$f)5rc8%ReqU&_wfw?5sC8s9@rTTt(JOq+B zXYGjA0)}pQ z0q3C|;`>W=0$w}81>UY1%~{@AV$5>P_&k5~agw!zXqW-GY_!XR8L2E z`au|h8_gQ`s}^fobzuTxe#Cm&Pas^}z&&m>9NvEmDAtR9s5@Oca`=D}C|qiGBVZjf zf)IIfn&rAFE3NcIyQSS&kZ}og{WK;ObuDiKCh*Y+SzjF^y$RyCBpUCIBr^r=jO7|- zI!N*SRuX{-_H1??)#@`D!4?zwlKY88&UlX^^%h>dzQI?AQZjE?wYvbUY*T0CgN%89 zmrq``GI?wh9IC5|q#qi`*4~Mo_7H3Fl2B1Dl^biJmPvbbH8xzxuWR#DQ}^r=$EDjh zvKtU4?AaQgBZ|P==zcm<`3q%8k)W!R%?sHM1J}AytK@UDR8$^*hES-$nDNg&V~=qK zw<)qAJO#a*TS|Zwr;I{dXy{lg{_Og|2yOmqy8K&3XtRu2H=-FBCnD`iWguFiI~fJ3V9EL)>!Q< zhpUl}aixx@DFs;deV%&EUlDUCWl=g8{vj$nNSq}uX@9?}e5vzAQaGLH^tY>|_GfSY zMe#YpU?KJoZ{<~67lSM$t{eF3*bZPa#%LB<4fU@8^>hPN7{X2HrO4bANZCG4gmM)n{4u_((eEw#+@=08Z8_)lGSZxtjId(N9=>k^KENd?m?- z28vj{Iv4Hn{inCaq-p!k$NIi~QF{1$xAl>;l2QG9K?Ql33611>>kt3zgr>)?vMO5U z)+1^IpOw=ewy(9msUzfEa(TXxgFXIAB3)91yXD6_XJ$X-bfp;gYd7nm;$`OuNNlNi z?;N_rx#Fk4))tO}Lg~eh9nFJ%dZxV)?QkK)-X86X_xX8%>H|7n@LQV%Foija)+u1y zXV;YCvh-KhstKSY#wfQ7$`_ysE5`=|)6j$7*cOV#-dz9Etb((A!H{Jp41zqIW#L~rAP$p~x>u|jF{&>kV+^8(4T9Em* zx6gIgD7>@9&s~#^e$`XduPL`)Mz0Q+L)veWSw;e9dhq(~I$@gUYwO&sbX7B{%e6(} 
z7s>%y=KPl$6PZDD9rW0XbJx@IBVDq+(gxpuspB1;;#lu{>Z(+R=Qd&r3$ctwFnY=;spl zlTdVXHZUlMglc%41iq?x|3!Oi!9n2c>nCR0yU)W7pFEAYUg#V1Se7HynWC*?z?bZT zB&`)`c>78AEvNwE_CF|{L#+P5l|K7Q_c9hL7TnRbVOBbXXU*&W_XQ@m%e{& zuJ|X!r!4d${z!ke7gV(8RD^)wOb)vS9iP-&nC72ZM-~8xcETNwqf*9xS9zxMUGy*F zy(3q2G7l3x^!%>*O?nyw6$H+4tG4Yiw`nPt>LV z)+FB?W#j8zO^!$*niG{5Vi89fL#MvqlpBV%lIN-2eAjNL|9}#x*ZK(#TuV>=Vf`dH zk->y=wTQpJ%QiKKnBT69*3>GUgK6yP=9!QX^nabna$fDM`h>guG||lt_8kA(=1WhAZe&PZ$ag8t{5Q`=aE{EqTs*cXAVBp7pG2ErR#W* z`}G7^z;mji%ri64cJ7dUFKiY_J&)S~V)_05g&52wWZJKvDeX|9H|b6S6*$BIEdsmN z(&oN`JA$LT3oP|3IzhH6Ok~FJGcig7E0BO|+5QK_u-E$+V!)nX$OJ*m51}S=mN`Wi zhB#Xd0G9hkht-e|gR$X{6sML<2=Kasr$k2Or; z@wXSl>LZmr)|AqFjth@t9-@n!Ciw3fMtllfH0NQK%cC$$xJPZ>%3-n~mHW8my&-#* zb7zx@8I{p%xu`1=t!#r=!DAQ`hv#^VvlkZ3FSMmM7WxH&cV{4?_QmyUS8hoJsFpz# zo)*TIg1q_(?M<*>Zik478OJlTTHaf|SJDS!0m5l8oog?j z=Ubc~zjoM&j2sY_)hL}y=_u$C)RVCM4R6@e2^UPT6(Ra2s zJ=^SUQ$52AQrJH`3cEA^NNGD+w@sD30*<3@qFC|3vAk+}_D&W&kVSc&0^I;@qn>3q z{EZ5pJ9BA14g$OjNgsgZpK|d93tVnpOlK8FXmY!t#QFC2T{KKhX~Fd01ZQ2o#KG?% zj9DLUe7WeOG5NGM?u8^pquBX_ry%*qSvRmFfa0oxq<<$qgfAXQ00Yjn;ear1>jCta zxNNfy8~E&$Lb-Qnulo%YPDR1FbLo$>Vd4fDkFW0t-WDnyyc!Kh`JVaG zk^r*9@A1P`2ssbV>*E{a+hNo+btB$P^LSnfN!T~0YUH`1o@&zNkssJKe`is`oh(bN8p*sg;Bq zS|~(+x3=v=NjP_#Fw!m=9G!dM?bd+j;=P-kwZ_wKl)zO`QT!M6K8A+g5eLD)KS5o&3Akn?;Ng<{4 zX;T2#Iy1~TmF$V-3+AO|7jFo+5a`wq#MWXj-x;sa`FSbE7iX{uY~Feo7xNt)F88=u ziwtOv9E#MK`PbaQ%{m&KPMvz=kap9@ui9NUAKlST+H+$LrCg+)VCh&@wV4FKbAcn- z5St~TrF3R0;lYv7qHZkzCvtTvLS1|$CVE>CcOl~O|Aq?y z)<*=$m;U-34$plK9DjTc_P>1&t^DEt-Q_T3t7D(|#X0Hj*T*eJhR|u)GV;-JxA>&0 zxZKGU+rgiE5E-lY6oxl@>O14T4v#rM{eQE8vEOV!r0SksF(~PXLm7<+`uF}U!1S=r zJ5tLv@{%f2Y{Usy98ts$i(wjKi+Wnta|pbzZ)o78XSXwC8(ot6;KcYX_(${DU=VrQ z=9}m0wb2iAc&L&2-MHN&Hj!#O3fxA7qpU5A<_?FQK=k3)33Bb#;J{y|BhleHAm_;`o~hESxccLA5_j zU?KCK*6HGfO*pn#9A;{NmXG{}WN+u0f^>&C)5pdiin8Ck?=x;gW*va#6klN65u+pb zA+}|AXfxRa;1epdHJs$l0OZE~$Gh-OF>`b$fs>s9IqP-@r4!M+R=X!JQ?LPvCimqt zz*hWVrmR<>&82#GAbY_m*+zXEclF0Xf?=b2*{t@vv*;wYCCtPA+ 
zJD235h`V=%V30fy3Vm=D|02p0%$-N5-S9M|Cc((VTQHJy`V3KT1smIF4^!wlzQEIf zjQ^kkZe^;Qvw<6D<$%o+b~n2!i0Y8n0$z8&welv4!1DkZo0lE*faO=)^R~`=SBWMl zMl4YR*9~${Q;kq3SQu$o2Ei1XLD39-NH9|UxT0BFS?ai*NCG^5eB4*{;B_M%p9BH( zQClS*BjT;03V`T#P9THfn8{UN@UYH3#@a|9<|>}qRr}EB5^UYL9)r`KR&FP(`&+=c z5RPyItcAQsYHgblEOxqKu|=Qhvp*!}UvRoJ%q(Q68nWm1=}@7#eMoB#X05M)bbg9! zrLlP`|9LLwX_RJMjN$t3b(KuI&kT*wx*u#>NWEM9;HfPg=_%{X3L^_wO#ju=D~&B4 zp^4Ji*B^p-GyU=6b{KHR0%Upfvh)mGYuPVf;Cf!lC!o^U8c3DKq4vLVKXj7stNrVK zD5s2DAE($iJG|*x#(_Izzx*J>Z+3|dvL>gH>UnQ7ZJnO;KWM=6|B42b{lC+Ivj3m~ zpOC<@ja}uCX-}!!ow%z=F4C1TdLUc>X9a`Fep?3(TmCgxKAlLk1Q(SY9gy0)83E?)^$FgunsFU{`bw7hmJ z>8EK}+BSf){pNz!F0oI4_(RO%|r5}yq3s{!MtRNOfQ zpi(-y%Kz$B#!H+!T~evSBbreOF|X@}&(iP~YJ0o59|ISh`5!BM(PK}Jyx?-H+d8m3 z6nN%wU3jWmcPKKKUunCKtz#a=@v<@HVa`ZEg_0=Xvx)z~Da`h*%ifOCRiJ-niiov2 z+E&}CGr=RW1|>wL)a0)3v;)qh;@U}yDJM)4W}{-|ML@~(w^9!d=mfxKPOzc3mI3go zv2I?PBm4p{09d5FRiRy|G_?t;Z{QTdB9|sN5Voak-eYTqxS)flF4m(Y8(}^NfhAox zzh%K`{BNY?>P36|o5!;1MGSB3etd$Qaf<+Dpa%?_CtT8gf>ipG?Zc2|-CtnEr(BF( ziNR${AKZ$7y(?R653>r}KhXHPe1UykPu`Q79(02B-{<|(E0?&}a~+T{8?g2#=HKYP z1Qmv0SC8p1)XX|w1i88`QdITNZ%_jAs8*`E++V#lvHid)p!F&7Bft3tI(wy=ZCYMP zSBo2Ua9^8FSXB1haxz11X?Ydi@w(L@gjFxVg|FeQKsf{FM&2s2fyx$wOe?RUGVG-K zcKKjgchco@t$F%{Y_#4HLqO1{lfO4PYfW6SK2Fd9=aNPi**#ig=il3^l=QrIUia;6CfXQ zUCLb}AoAoldf_#pJKRw5Pt_@9@%ly2;b}Cx;nSjse|urJFUtsM&O-eSB3f zsquUWvAXwbV&BCe=z}ThQnJ~)v<;D&g?*wAlBiI_ZgzG3Hx z_oEl&Cu_QX1~GiG60yiElunB7duhf6u0lvSuEh9Q?BffdzqdCmXCfvkx*OaAJL}Wv zE$665^3ITcGrVq*>xLc1+aQk|YvQz{{j=yuIX@*Rt6bK@>`Gg>D@o7aDZ#mCB@0t% z-?&Hfl;ZXTjdmn#kvE|Podftz4r zoSEG(Ypm305B5YKsP`s==&N2j{m@V8C;-;^X}-n>-TL#I`WuSh#e%NBa@>g>@ye?h zeWuJudj%B_iWkCpK8vPglF2`cx*{-tA@g?w1+!Qpx7=c$H)=RV-&e4-BC$;<&z|p{ z)dmgCTHpJcrP38KzuPIU=Dlgyx7oDf$_{XcCupi_`zFUXsDrc0!xrA=@t%|kZ!Fm4 zWPx;~J^VXW1mntQ3w_gzG23G|)0P@o^Lb5reue_~ZhQOPy*K|-=(Q?r#>H62a?@nyvALRf#EPT5b1W|t7}5>)GLnVI^M#1ruv*&;be zEw6xHZzS2{jgyt<-|M;CD86%5uUhx6%`KIqEjol>NMWb)_tw;2?cwNhhxVXl$#j;q zAjugssaB-jG%dve=ZL3Ps0~0ciWz78jd4KR{ 
z3WH~VU(lA3mH$Av9$wi#?=#57k#9+=w_|6VwfpI;qD^XoR$`)rEQRh#xh{3=i5!Cd z4$9RF-$~KMpQnc#%8%E&jh3}vkb}j2X4;%`ALhzqc{m)qYM9DfC`MX=Mt6i2y84!W* ztT^5Am_6@q#akED?as_(#0gLK{K_yEvQ-)X*&FemUR@sZcN_#vTKlxuzxee$@@6+V z!5h@K(OS|5t82EX5d3l+XiwVY0cW9pJtr0SZ^sY;vrg*Yb~`d7t9rG5lz=De=#J#1 z{c}OT7wd4Wd6!+2e0$BXXxk%L??yrM>0withb4AZzJ$OaAb3YdN_Af78XUW|IGL{} zKY3i`&U6_(dU@oy)32uSd82H$)6z+0XVk~{uPff1Pl;=-!v=l)o&nw6%hFvdmO;QV=SpkE3}!mR@it7cz~2zTZ@pitAzc79QAq30v~-v--szC z=-Fgt=Rw#id+f*M91^(Wmn$gf!?Qv@-UNs5N+F|M!#8U|3qDJ=Zl@Mdbv}XYtXwe| zdH{WCyZk`32+ZUQ7r6KwEp=h6Z3XZ8DylU=`05U+$mRQRukg%A8lDw?)n>;=AkY%j zPVx5Fk?}V*i7*cNDaNe23p*HjA(CCurD*wIWlv8Jf zQ(mKD$|f{iO8VIQx1q|ZiN;B~h%G)cxcZTuTEFoN9^`D<-SRt%3_exlri)|lad=Ty zoqwH#CH6BbFYozSsxF^(leyFPr73$z?RTW`K>gHI4r~S-H!9_pAH{8754$H^I;h`y zcbP+=-<)hwGeB!mh;>ZK`K^W6l6#6b+Y5H zBZNG$zaqiv48g(t~)oi z(ib+Ku}qVnUb+)=5==7Eim>}CS`DlB8m5_C{dYJlkE_K5o7BOoADA1Jc3j+hu2DH4 zslBb&Wt${gK{Y;KcY3Ywjn+J@?SQhrjb1sJY4c2QVbV3%e%K3xKz^ih^5Ml&>94CL zE*WG2wjs7uKz^to;?eVc9o%ig=lL}nug2T-c82TeLaS_N-# zWpaGq*Y`rk7n?WVr{Li(AQ$NWw`)kl#6n2vck>Y$pUxE1lcqI(Oa48crixN%lf0zP zxOyX9FWNcmCjVm$VDNTbF+mpm5#0{>GNf*`Hxt&Q5O*Xp7?Zm zkH;0?nzN$v@~l^=uUe^u_7a1`C!mLQZV_hs#m)P+*7l)6SJqapj0h@#7G=vw)ixPq@9*>RRAdj0XKl+F(Mx@zL^lnV}+ z1ASBLl026CTO9&o3=l9daS3yp1TFU-Tu<0KKD~6KO89))VWa%JlvDol7Z7WD;lGq} zmLmAcx!0BSra0RoJqZerzX(3Rtgf9$qK8k@0CDbd*8XCrJXb9~Gt!)7R7Xc0zMJJl zz?&RQ&OK|4$cJR{5)bH``7kl!f`!a>@U~Zs!HAY!13qu$1ihsb?5o%47TM#bKfmXP z!yaVh-t?_Gr{cZ&Ctt((`siLtE>g`44atW8RJ%6hj&gbx*fVR@G9!1JnpbD#QFcM! 
zH#ca5nh9^?t6crp{||NV8P!zVz59YFsB{rj5K%xn0)l`DgkGeHAP5LSMFlBAKm0&QUfaG%{nX|sj-@_fN+HqgD4|mZ1w;ogPIoIc{Z~Za7}avju<}i4 zgw?v-9AoV9DeS$A#G7Pt8UCqVcO>Q5#jr=qcuSL!2aAU6bIzB{g=W` zm{ZEPBW4d7k}_z!*Un7EG{D?2jMpeHR2yb2hOc${99VZLc69QEw`cZGHr#D1(ZGXJGwtkGW=W-slzKm3)QOz!6(z;@4#}-~j2>=}5z8T$iWD?zvEg0X=uR zeAHU#pY24Ps9inN=arR{=BxFE>JWZIs-1pQw%gdCSn8-vB4*x zvUxg7pUH4Oc}Iupk4HQi)c-&gNWS*x{AD-=fD5_SWkm>wML;R(7?#)CIO3naj(FxV zJR#~qNzHNDAiW5>QUgZ1tBuSz4EEC)lGkt6}a3<;!|H6}HzF?qqFn z8=KvlvgJ!5PDGlW^IAeMwym|Av!=wkXZJOp&8No7V-E>Y$9p~3 z;P`=O80?W6v9r=gXSt{6baxs;d3NQ(n}@FSA2ZOk2*W#4b8vId?aOh%t;a`b4UM>Z z>v+8)k-9=`q4Bc@ABo2RL0BSoA-2KoC*YFbMST3dU_8g25eaaN%iG(~`>uD-se3Ts z$tNV>6)aeM2dVnF#rLq~zuXL(p3W88!3XSkF`grvIt*&1Sg>+geE;=q765UGznf<5 ziweeHiqvzX zOFi5G{KJfiL8?pYIzrL0OpPe#r+((cn(KAxZg8nJKCb8Dd{9@a!GUD!@pC7)I6$ni z{zo;|89Q4^*w4agEWoWyc%v53yJN#?S=_dz)TJD$x$t z=E)`y=Pzo`3IUou_l~OG?YE{=#u>ZmA?q)s#m-N$iwMp=Z+{~;dLxQ0e#3Z4Gn9F+ zq_CD@Iy_=3yD%+@iXw1DP$&i73|VeZgVFrsHT#MEun1|rgRsu`4k`)|a{-wgZ<*Fl ziJd3|-_6~eeM`&j)D5mChw{xnKsW2$MRv{ey1c!j3bAVXZWhy_swWaNXy>Q%S6K0S zKd;5jW&=yZ*Uy`QwcsmXtB5FpDJ^1e%`0ShogD6&MPpAr#Wu+v4IJz? 
zsxrQ&BuDl|%^O7IHH;+w^Hy^rkf9ARL^v)bOZvnk^%d@QCMH)E)o@YS-**cz6r?%9 zbTL}I@+;lubiTu35G|YW7)^japGIW!xuK_aEmqWj7y@2B#QCArWgg4Fepr>%=GyTK zkPy99f2u19@vL3Uqg83P6gf0f_`4q77WmRp`h6KxdrWNeYSIQe-UBJ!^s5r)C)}zD zl;i0h>famaan$V!ciJJYyKKuoYm1m;sE${Y643U#k2vsOG5a;e&0MSM=mdX9f$0j* z0wnO;(G|!l&HP;n)ewVxLP6K@O(~V|*U9w2iMWK4{!e&57NTKTMUk7kz$4hwYUGiX zp$S>Z##UUq8%xuG`;L)PM+5&3UOYfxD>9xfYTvJZ(1Zc&{C*LWj?Lxt4Dr*zu00-A z(?$q-NTBnk%UXQn9^ntIi+Q7GHtG%mg*s-T>ZgQ?W1G(p51N?`WAAsarNoIe^Kk$7(7gI>tflz6Bc1o_hROnwP zx02^om~SkX5=-i(>C)$pI!(z?1G6HVzEo`x2)@7nvTf(UeWQLN+hPXy+}%mRl_@8n zw=)0`|MP9~cUgsb40MhZMS)gcb^mFx-jtnadSoPi1$H7~L`F`V^GMch!#{NQv3*o4 zT5(@FdA#(8%FSDQeg-^&_S!V`tkSzpi}u}T=3RQr@V$O!)ndZ7JX*20+VypWqHXW@+hR0%oK3mVP6Z|wLAvKP5fc)9g< zX1%d*)=%E_O;}h~e5$bav9{CTY-Ma#uCQC7j%F(VovC0%blYZ!d86%h2!*Z8^83^Lq_01b$mPRol(M@OD7y3YXivM=bkw)=qwnCaPzjy->*q z+j5IkKFrZm#n1v16l(Prbpi2xsZq5u9$t3BeRIEYJd+G?5xvVD_#?HEl_}@!e&Pa+ zl9_za@n!h+69q@UA8T(bIdhZgd2r92MN2>Ec80?*#AuHQQb}LBWq9D4qPoY3-FoYq z)oq07UcYBq^VV1JEq`$6wd;b8`Ub(Z{5wz2fM` zDh=DW>9d$P2*U1KJ5!X<=fZB zR26i-*gN&0s56+p$@HHxm$3QyVERw?neMmlMq4n7SpBQ7ePV-c%V$w zNIrN89?vHG+>Xw8et-YJ956y4`n9DS%MrG!HEqX>h@2VF?*oT4_s#__0_Al)b2oLJ z;9c?OT<1eA^PF|fyx?CK;x#-deTtl)&Eeq{HOhcE>f}3w`+R2)(225{y)-+t$wyo; zU(hcHM%hvd^2?JxNKykBY<&EuhAW=`^kTU9^06DB@aC|vp-bVzl zz25|{M|lpLvEr6M_rKbEYqNP5I974{RXWpU|4n_R=Y-597j@^B<}aTdQhL&1KqGIx z(;TS!?3usiw{0CK$6K3l=a9e<7^4sewpgzJW35}bUHrOw<(hi~R; zt4qAcPvO1 z&fZAiFYQ@A35X$O374r0x3nOru8z-j9-sWVK zxC;e*s)23#y}sCH6Wz{y6-$}-e%_JtN02T>_LJR;Zopsjx3OjWiH2C(BjHKp))Psv z*0au|f`9dh+O=f~Y-6*(b^6@H`^_yz)&pDghkV5)M0W~7yM6q4gC}R*Pvm7}8s;fR zzzh`4_D|=MGBd&WOv=xOSDT+w1sGn-MEMBI#}7-7h!r}I|4zDc15p^$zu95Kbhb$B zG|FdNlV3B(uPa%f58as$sABDSc#4*6FV+*{l-JB7{}?{1D^{9k66*(X&X(a!g$mm{ zuT+kV=tsAIST2#XMdhoyZxvm6my=gAtNlA(*)7uIW5d}foS<7Ch%bFG21hStvcXrK zxGAx;xx#m)F2xwjeVAH1(6E$$?h0cCU3CZgq`xrLmu{Q=YY z+pjc{A!|mwBVX1;kDq*F4Lw0&mk>Eu$G~+dolu#f%<#ClrH15@p+zFYjcHkXiLUk1 zJEuCGc_~IZU^H4b<@iynNA9N}-SKpk4*eJnpt(XnT$^8ov^;)B-yaE~M@5#j8JSCN 
zr^-&8!z(hexZ0~Br*1tE_j}X^*cFC)LJyg^kFS9`-!;G`_{_}F1a;V1I7^BoR$+n! zs>rr{MhvX;u4HOw(&AwW)0O1TyPI~dKeJ0YPuo@UKuS)<(-GCh&MXW&nbKJOx+d7! zvU3-YNL=ZDs@TJr`QHV8|95?7aNvlH01o?$PVrgYA8e}}DiQz;sfOY{J92+kuC<1b z4V7J`2qG_0`rYP9;~(a~H@%A*?f*1irKSH5I@14Y(|b2*S0GAT$tamKzVC`7zs6JG zy#24YqU=iH*Mq$V!gsdg`T;PbY5#|vuj;8*Yq!01LLbbt7&IK_);qF+Ps6_h9!r4s zoLx$&w4YrZ0j)sv4lswK`!0WHLT|y_i7;t1>6RE~74n<;Nbg%X`d}JR`W#j$xoBA@ zzW{~gNoGSlnNMMz6XJt#z{}b1APT>F z0d%hrg|K*pMlX6d+N&cR^9@unlyR{4RuAzKG(9E=(hE8!w0p*aVBk6uv1{-pmPaVV zSkpq12-cHJq~2?+m(*7z<}&CzlPfYsVhki`9uSau=WjwaY(a7yR?l$1oNU%8$Kl!9 zF2A;S3HbE-!5h#N!)LyCDL@NLK-yUY2NJBy7jd&xAu?f7X^ zT$Ps9LyY6#G#V*4g|W>z@4hbt${g55lozX1eMc$dz2D5%0O;*G-U@Jnyu=U393 z9s0B#1XeCLDbddG%>XPLfw#V=9eh7LfjpwW;rSh^+LS>&Szia|K7@BIfS~}E5tZS= zYR4RhD5^bRk-?4WU_bVj+STSkWUkvHQ+9eWhw$FOt#8!1zubUV?|%1HxSSIvN^ZGw z*yknvB`y;%2=x^CC~jTCy8h?)(Qc)ZT%Y(X+A88+!f(7xYzF+KizyzBRM4 z+nA8OE2@Pbo4cTsjoio=$@}%edj&4t@&?JWGb{uibSBV42>=H=J$>NSO3D!UqR0LO zTXW8Qe#4z7>NE(mhgH+MF!Wxhs>7izhW{OIk^51rLnn7GPF)LOAnP~7g1I5%Idy07 zaNiO$yHX;E0|rQ_@@cwyy~A#ojZUV3IZW}?3sz*=YrGq01bA?qmgRZ%OjFnbq|9#Y z%)CzCR=Et}{B?16I5i(HLLvwhdSB1y`aH+DAZrgP?kO5WzMhTym>l zXNd|Y*DGLqj|-dg6&&n9@o)ZU4!0J$fd@WN~#@HXX3h^O^c0T$KhDJ~kE~cRebzU00 zjhXZjQd!Rj3!vE!8Pz>jg=j3{u+m=Q_x|ydf?1J z03(JQYhImwG`_DAGhAO43u3A2MAorcOn#v%^q7&=Y~SI%V>~{Rv$vQ488E3%vvfi` zb|8oLB>WI4g{e@r5PXrO3OAI?!p>-}D>fJ~%p8P`*Ff$|&<=%?YR=vxs})65C>vU! 
z7E24=Z({DG5RCULp`NLC_whIQy%Jm>S!yU)fcDQefW6O<5L#_Ek?(>OMb+*i8owbJ z*jV$|LBTtW(Xgz`Sfg4btszw_sNJq&()0|u*vGKEo(PM9K^=s&HUjkxjtLK{-BIg> zMU7eJ@Q(hl1#0l{+Y^t4^xcA9p51bjqgm#^u5MZxzrSnQKjORfBv^UexPOc=Jw( zOJI+1*%MX@ltg+KN`j&gy}<`O9OtZd4n@C`)k0i9n3u#taWbBVFH*j7kcFxU&~3^E zt@ifTzKtSUgkJ^Pvy3B7->xYGSuHPOQRJ!VsUa%?aJ(7?f{Kg&Ed2%8snUjrP0lgM zhTV>izxR-k1vq zhi|%m!mIX^2kv-O^5T&@Tmv&{FXljNT3uc6QvrLXyke5HCyl44;Vq+~s1Z5T&Md&F z2$WAwQ;IYmrkxT#YXqbT+)v?>GH9yRlqn%A8$sD$pKAJb*ln4mchYD88L8m8;cohN zh)<*Tj1|tFdv(4m2^8{yJC#CE$UW3}bmGPU><-x_ID6XWDu{rB5HWqP5kvYhT zQ}YnBbK9UwEFiGKg?~WJfX2QZ>P*{LO%Aa6eS0>+bLEQ1N^gVgw)uzqlC|EgK5QcQ z_<|hvhv!6ql!fN2JH1mRWil8ce(H7bC%F34#>MeKQamisV@~m+?V5f(exwUHcD&Yz z&Q5?)w~BbAU~|xQWdfaFWZq?9epH~}tFIs@$0M+FYX~ieh|i2ODLum?g62v-?5e@sG6tUMQ5mWdL5r$K7JAO_EvjN5B^yPX!j+H2YO%NC zU971qm_PETt(Ke`&>WWVqxTj!RC{QWsH|wIxK4_RN_P#&^T5?z=5|1?eIb1=6e{eF>4R60|hSF|TJ5rPUUTk$B(g7K>|QlAWB$M69}UO1mlH zns8N!H&!dt3#)bA`jD}kP`ma%z*$Y-$y!-=qw*RmjCMTuj=NlErGaaMFC>GWmR5G` z@&jzCrrB-0LOp@0)^OcVV($yz{l?11XW>Kdx`Eort3MKa1F1Yaju_%+3)pKT6`z>* z>_MK?Dg40~^#)hO_N`iR=l;joJI+@y!rd@;A1GtL+&jg%XlCge zLM8NldC#wPnI7gPi6waR;R!TWj+^f}X1i1Ml>+?RiuDZj{>~16^mgwRs2f(f%{rOQ zEn)*H=-F`+U#EH{?`bS3VJCk&n$Uszya1#ekBN4Eun8fW&E#;__cG7fD!f2_+Fd*F z`3_voMTAFjiP;{GFg6;orsi-^f&CZ&SB9=s7B#o`;pc2X<9A@&`;g^P)W{SxU+>$& zI2S_9W<#eJR=Fo6NEm3U?BY(VU2~k;bS0g=wf5r<6VKNT9M8(zN|IT#`V~iKj$-Vb zEy!lh9r5%Kzy9h{YbA_nO-X5Mauj8#iRa|#+y}3$BL4=`f5TUMlbi4Lx0R9pukp|u zUMs#+{+7R7mG>^d@+%dl9aD!22f~c53*TNrBQ1~m2@=)#_oEBkOMU#6 zT|D(J8A)WCz{^B&|67u(t`37UJD{+2wmBE*2b{MJTr?OhuE5&q? 
z!N4X~d6T{`SY-mx%Na#6T_kJ}_6dFI_$>bm>Gdn%N2ct@2-3hNTRjj;Tb2TIH8gk@VovB7bWH<5@8wd6rO>+9qZJQ;)gVgC{e z>;lu>I4D-M)Fg~IW1QQ!xh0C@brc%`DMrQKb?~9jmhSVFPQuXaFLMT6o@aW#f8_Uk?Ln~^qOcdl6br+$v z!F5&Q=ZsaZ!`Hj-=R>fkPQ@T_vrVUCwk5>mG!DW*a%xVc5jjxB)-(Uw^)c8vkQRq= z(E~l{aIbdTaD4{u_KI(tESRvdv3zH3rqcCRFEY9KtE7r2I3$QL0vR}i)8aT-g<;c{ zTDKAhvmDt=?c9CvncX^f?f$p>zM)vV0{P^9-&o|Q7z6IDw(7+UF_%yp@rlPjf28iQ zd1(WnNd_M*4E^@5=q}BKE%Xd&J1j@L^p!*};V zaFI6M|9`i9orn`3HmKcJ6H1qB2=*|7GXf{CX$*Z?r6PPA>-KowUllh$!OzyQR=HeS zum^|RU+Ut8$L?|8!WpB_2Sb)>c6MAhG&hjh%MtHuDt1yf0y{_i6%Vx6BxdXQO)e|U ze@@>rF0K zv~F1Op$S43NaTVgFn;ZlzEO7l+9$ZX-%p{2lU}f=;s=oL&6$@tbHR@nkUa;s4DMM-di0(OGb*L&wPrrj~6u<1?MIngg=?IZCZ-em$1F(o1yNI3{3 z<&0vx)t}&NI*?ol3uYXYR)XQ~l=dg?#ud>{$|ka_V0m3pv$} zKBTM(2iGg%x0^uQ{z9vAq$T5XZ`i9m9rsRnM#_`zq zBRvxJ*s6OF0{n%$^;13l^Qd1zPn{ZvT|V=kO&Pt!X!>MZ=tm0)>VlhnA@?P0+Gp>D zODQ`S@vFl0U0U}sEa3z3YeCuynS6;>>8Tg&zUCSX#6VK_hT`$?AeR+b)e<45^2AUP zR)^@{jOnZ7<+^j}R&$VKo=%c2PR(7x9~4oft$u%QH5L@Kd|`_&E%S8?Gnz$57oFlW zgmsQXC_{sQ=jPH+(y2=6hk-@0pFC=77gb9lePb_1k*aVoop#A+-d%h#^|hUdyOvb? 
zLcIp|O*I{dm+d%0UCFd{dX#sYfKW6aFnP>rzrYZL_lSAXbWO*`PhbGK6Y-iV=NXP^ z`g+_gYaC=7yQ{W>tOq8aIL9s9iXc5gn$kM<6(=t&xSb$9>r9+@eTlpSz1_Ijfnif- zxjL}@Bkuq7>scCKfyt6_2!)X zOCyj=6OToQFIK}gdFdhb_RZZdG<{}WO>r)?pEo43gS6=_)M1Keck4D@pSu5EV2dkf zmqGIc^zIA1Kx44er=$VMfInL3)~`#3!VIr8;V)E@EzDlT07D6z(Qhr0h%@J75w7ar z2$Lw_pmYCDV`m7#LUaQ())a@{pjP8-FweDjIThA)jx0C&^nP*2)14o%K=~zj9jT@J z4%mQs5@7*~Y4zS-K~sbVea}e zvy~OyV6FRvR^%X?a*AMAy^9O#9q5HjK6_X#u2Ow{>opU4zK4*y24-CjNkMSxc^sw+ zdW0oP5?*gg;>bR->x>Yn=4+mbxp_qH8{}037P^P>(T@URg~8Mr9Ly@!iS7(V1Sq?nz^gbWN@-BVDcs*KX#4H21Ky z2|(K83uUK=1TJiVSUqqa121A|o0FG<;P$wO1i2_P8+5tQt#dkj<4Wro+P591=jR}2 zPt8I2j3!Dg@ql?zrdBa4%Y;AJ4M80R@#r(?-lKgRTbhDu?1dq4^9Ph@yve3n)rG0i z*>`GZkY@wbUw&TzYUBBL*2)PRS+CZ>D{&EuhqELpYpD7q2nh7tHh%_#Y{tt-LFo%@ z@9ri<*dIP`V1|;T>kb*pK~f>-IQ9>UySEe6mywMU4}$`@OKo~?;3WY4q-q(iqR(sV zb;Xv|6xP|A0CY{~>L198J5=;mHC@0FfWb11u(zSaPII612EMeM*^;es&14Ui05p1) z6{Tm*c7BSCI%|}27}$i}fKjU$SK{Q5!#QApi|5%T)-{qO!;tdfq#t!{ zMr1g0T#O6&Dx7UesYMfJhUU>;iZSxks+~?xhblYZ$C`8^5KAsv+lm`oxSCv{q_2qm z_9mxL)M7MV>=6oN1l_~xa3{-pX6XF7S|@vdkegY|?E7+w%Zuuh%UfJk%c%0jh%jab zgRR7TX%5(6M+!px(S*=O&;qP}@4I&WKE+mPsV9m&_6gnN&?gvJ5`OvPrR3m%bgoEO`Z3!45K3ou%y%u8!W8c!rOWZ2gf?CaW z{ckjTUt$r`K(klp8CUE6$bV}Mu6&1HS6ZS1%NPUd2t?r$Fqq2UdWplIgq4W2v!&mH z?yi+#USIr}3#ZJG%B;M`>~0o?Ye6m--#fU>{ra8LQ-d>_;cj?|WP`2?Xv(iqx%0RX zes!FU&mp*v6F6;At%E57RwoPY5aztF$J9N#>62xWIGyan%9B%9TJtWW9EY|M{bk1W z?O>8{0l^LMWaNu)Vl(!teN19EVEur8R?Z;jR4d_-fCH7~BMKgKrSM*`RAnxmHlP@F z<#MHBkB2ZsO3p80c@`0qlFKfmwa#l?TV^?&81{$_e6 z>9b0e#OLCVH6k=Ze`J3E+vVS+eF}hP3BMUJ^jG|xKcKJmxxU4MJMyi4`MHUI{?WNu zW+qBWV`fAJeXLErs-(&z9Bxq7u7Bio{l_~SM&yWH z=3kkW8C1Fh24s6hE`Y@`5HL~gEV2^;>$N#+&Nl|Fa9V*x)8(L3rU|SkbRL({a`jk+ zoMHNRW3jOk{j?mQOUqJr`;ZS373n-_6*XY35JRfO!T0M&-Q1C2=PhE~O*N75OyS8Txm-9rcNp zM{LUWwo=Mxs5BrAzlM82p*nPd96z@Qa7zZW0ud7G-#ZikN^~1Zr0Y5`&DFTRdwUIc z<;a~}8w@*&nWOs)DVDo&lvP73o5vD9d{q~16QN~&!7l_$47+hV3=KuT6b^FU29jmi zXA-c`!rF&p8MoJZBC2q0@t}%LFpwbgAABd)vs;EqN9yRABXzW`pHyww-0cU>BcW>? 
zz^R1q0=f4ZC(+J!`Nx|9BCf<;ZkdX@Lukz<#>BvlYEbOQE_Y#N*@;}D-FOgV_aI+_ z86=U{T7UZd6>jBUH0hWP_c!Kt3Cq6;>GN)0mc<^8UpgQ!1rlQcP1jw$!~H@&M!6Dv z%z&XjHhM~@?6>*YW8dbBAGoIb82%)u`NPDKlRUAKzc`(BNb-Y8i&K3j%5Ad zgH8r3!H?It@v^z1Djt$O4Zac*mq8GP-mPp`%!dz*2Y_sPi%Q^LthdEM&MaxxeOW~m z>&GvhLO}{x31O%)sn{G}DG$x`=V%S13s(Dqua9N_ULFAGL(#5{2tFH&1r0t9Y7mcJ z+M|D_C30^3N=Et##`6Y}(3dP{+9I^$M02{M(k=;_0Fbd>Y91jo+aN0u0to8PFf{@Cgv`s8N~v zbGgm+l=K&Q1?^NBjesKlC5&+IZuDbT_KrjRUkbh+y!-M155Hj#FN~PQaXEUK9npFB zCl_s*SS$~E4y=hCC>WbLQuMjwa{|EbWI@sb;vvz0K4iYX6|^}Y?oW$L9dZp4_cr5- z945l7sv6&Ktr$eMBH#39IK5T)4~CJGk%m@GZZ7!?M;d=w6jd;w>sh>XEI$Tr!@2gfrbYDlZSDGsc-aM|&UQLw1aInw(L(6g9Ly`RC9`bg`K9RFj8yTA0e~a<#BL3yLG^`BgM(c8w>b zb13d`h)D=%f+qS9Y`fzu5d(*2$7YI2X4Ew4DDvQPavvDkjmek6L;@GI|8G^Y$N z2#DrHypshmR)p@o0ulN74#MAwj6k9r>HKdbqb8>Af6Sx6saJ&!9ej2;0x%5R*p}x9 zA{H;ROSv#xvx++1jC`AfUnBAs+V$?|AVAugM3*E`=z;o-Q)X7j11v{}BEsU11?cj= zK<#L~7URel5qGezpMOE`p$xy_6~R8K;(mrdthx-kvJ)I0P%pxP0o9So53CJcl|27D zcLKH+j0U*VUa)x^K(u@WUbxR)4DcBEVBayaVyW7+9H6wbRrA3Vq;rtA_sLf&e`jpc>58-c$nV&`d*)c5m~rthZ^|~{ zO(CXCc&7^+{v%&=kV21koNB0~3xOy!&XzPVvlmvQz+Lnp_REvCEdMY;X1|2{1=#NpRDF4Aj z?j?nYaq`9q(31Y`r&FH^%5Gl`B#MNWtJG#Y8v?ovl8U)|M&8SruZCE3g)5aQ^pt(j zz00I}>X_w9!hhtD<|8_!D>oTm-nuQ`8~ZzLvq1@>3#4s6rU=bzGBMBddqg#swCMQo zV>3AY+DCwwM0WeTIA!mESMl{@da~G*=x=&**aKy+osBex$DRo9%H7SitB5hyv$hhi zp4GlEHZBy{uo<%38m(E#sNFnoyG7B#w;CeW6?u{v&NRVL!DBpSY!8|}E^S_ML=q8K z;!1Ld$u0gxWnoziAUkh-VW9*Wmiz%zHHBRIml%q#zhfw#*zE8%ywqGJHCuyU zyGh&g%Vo&+x<^dMw0cb1-xb{?1qlmA=3W*DniROw2Dc=j$#(ou^IQ}#2lmxl|8IE8*`MIv zq-nRo>al;uQ^p&9ex%_(bZ=zS448y4^pEUT=+=t|zY!8zV+4($X0<;q^UFeA&*Yb) zx2#_uxLUnILrO8v^Q*SjYEi_`ZZ7jToibnAGVff&Pbb0!An7@pn8L$C;-&uB=Z!A% zC`=eo9cSH^Y(pq-Su?XH%J$s(3{&fFGsrI;?CO0iL~y+wjhePbvaW*9Un@=CVg;6> z-AwTen(HvfIBno%P<=;eN|O49nOuE#8y(raDxV~dkLRf6WkSj_su> z%MJr%#FM2pD^(LsNcgMu#fND7dGqvl1Bi=kfafQrw*5C=?5|KIrTkAv{_|Z9E(apt z7Y2U^eOvTM8FvY1wf%DxB^4`jg4M9gc`=QkrsgAJUQ`?=n6A=@&9kp6-m*E z!sU>9P#{8JyKwdb?KZ0}ni~O*gST(Y)@!_F43wze4LFX8<|Wwt>xz5m=JCaGf){U= 
z^2C}j*lAd?2kgD%%m7Q1@0}{5n!ck?gzMZ2XO*KYU|PR&$n0?`{E9D8NG>&(FGBV* zD>J4+F8fGIzmH}3G3k)zl^f7&-?sM`K}lq;CvBFoda(vVY)_PjO!<65X@(b&en41M zF#Dhod3niJg3o8bgD{AiUEF@iV0;wGxq88=FsHt}t8Ke!9q^BaSh{R}1X+GMN)cha z2Jh?y3>+DgS@XTmDD{ldHQ5IcOhYbBIf;&AYHxC@i1U)T+zI*dGu5y;qLR9vq zhe)Or?EQ1b`nc)*@{O|3SDO~3GREocIEI(hmFkzbK()9wK=R#$aGM$LQ9|eB0ZAFR zBf!xs3Avu3x{@ClJKPjxCx~c`rdM4G#+O3=3gvc1yqNDgiswYJ1ZW(BEe`?C_f-iZ zwj}jp1X6AhAU&n|{dz=@JH2y45!^q-SC4gTw7&h~fzT{5R{Cy>l(cAU%!q=WpA$(+*u z#Sd>DCv)EYfAM-Q%NLX9R6M4uTDS@r;dP_v~FAd;qU=#1$y2|PEGBzj>7Uiz*Q zJd+Ks*Z`LZyO5m^A+W23Ag*YR z89W-6c(Jqqb;($Y!Qjo3Dmud}+4IOBHkrAIqvu+$d7f&|fAw%~QQ;-#?2`?KYIR;R zux^CUp6|rv;t#+(n1EUK7K8?seIaI>R?mawaE`oS^=a#Gj>6BuY42~{%0VQU8!!>#J+|I#>}w zy1K2$@q02!3p>QA)8tXC=K&Dwxm2?KRXhlY^)ytI)-$XMa3rD_xOhcXbYgnnQ?u-t zYdC%5c5nIlCNRFDJ}WHwL6Fo(|6ZR{j;BrPRi>DIM&}#XkGI1*1nwp`9g{Y=7TVdu5m=;-TUp>L(sbq9;eQxM3(42dl7iU zesR<@%Kr_M)sM?R^VrFoF()B~7dH(Yu#6W?4v6fEWIq1RV%RbfQ?|cfewUCf?b3&@ zBrjm@-;9V@dBBDP7U^jdYzaPr1vUhy=~6j8Ue#3&(Kx#j`WH}y%7e=Y z4`^oL+_2BFkll&rdu;uv>*J4Qi$oOBcXi-(IEPCJ&MsszPA@q8))o+QF_h9*+9AEw z*j{$eev=oMcV(3_>>DV1t;e`NX>Jb)=LA-V49mZ9*CmrTNwm_;?XKv57Y}Ucrf`^u zs;X{!&wDp2?s`w>x42@jl?OSmub%2G6|sNRobkbKa8_mTKjmFKT+vBcd>Zg4%1!w= z^5WWY?e1h5L~Pz>|{^bQJpun^*n|aA_UjI`BXsl8r<)PNi~V z2_%&yC+?x$POrBvK}rO0$Zg3HiI>2aT+CZSHM%SkqL%o?UI>(`7otMz@3!~xZf?GN~yf1XU$Ll1t(ZB%eOzK!VYE7gIBjD>U(Xsvn_%y#Rwm=Upu65q3a{GVHLuiPe+DHYE1P-S zZPtf063B_MT$mXeU`L@zWXomt)S&tub>5tt8nlbi8B?$0nAy{woI?xu;g`3`T|UOZ zTwhO#nOY9Upj^H7ewq7n>3laSVMzmF#v6!H;6gOlh2Cs1P(x!~m=WDFNljC-=M`}% zTjxlE<|?Z4JF0gAy0FFuWEcSPbbgb#j}u3G{cdk1W65v-91YS0*e(H<3A3_~46M~% z>EOxbD6w#@*BkZ9-y6bCo&rpRUIG9o+f>a3x4n@Dl<{7%#vM*m?BL3^)-@gQ zS61*{g}5Txkv)#{gnQ^G_d5e9lLjPyTVJad;DG>r-W0m1j(hMA_WYlDnZ$LNrZ!*$ zN^Tm*-j~g07%*-0i6pvz zOMf&oly4Rk71XSc6-%iBbgYY4za)Mz_>UPT)pi_e8VCwnJ0O+y{e!@x6OA6UxfZg= zJC?fZVQ$Re2Nj*2!{V*a2hMK|_lDhX3>W=E`-~7nOhU4-u4HAKS8o`}N`JWnG!)*S zIJ!RG4ivnKw;JsPJ_E@(!U?r^Y!#cPvxY`V*9InFbB8TC4KV~@cdaP9QuulpnwlRn4DcpQ(ZIQ21``o4^Cq-y 
z<0p!UnKz(;-n878`JKaJO?@cW_c(&~gFlXckradK$y)lbkwcXll-E4h0gp_*e2(?; zC*H@G^`5>p0R4h%Df+obTp;`g$1gITZQF6~*846B7fho!b$c7=4CApFbiPkT7e@Nd z_}QyK@H^OrbiLQkVg5S9G{Lo`S7_Y=tdiGPi}(T+bEVV;?7vnTg?PY|d!H|{ z%)}rIr+q%;PD0e~kC%BB0HH$e;t^UFpVQ-leG3J4@WvXZWA7f==bXDu*0d4XB4_0T zpbwi3?bWDjR!K}>|5Ox-K`!+1Q(TW@g(!2o-n&tWm*P8CX6 zU6#V6tcUFCe8zI_XTRbPTg{>KC`Qvjz6YGQyV~fzD3&^_H4;kOSBe>kp!xd@Q4i1k zpmoPT9B<272~ftbrieN@f~2Ej#j2rF72so}e9rXK#qZDLNw;)_2)6fs|M{m(2tU0f zT=&xftMXT|e2>IM^KbQ1hF}&`p+idj_+UyJkZM)bght5#!wpIiN+$#7xBBbAYS>N$ zHs;($j6`vk_&!%EX$^c^EM7{TZo~TEO#i+3zQ zN4L2@+A->bY*29vTLW%F_f?rqL97!(5Ej+Ry#W2Q!H9BfT}S1)0>OWGnY<@4tlE_$$amZRa2(pkM^aSZ7mgdZUEYXPny^FIDqO@ zbVP+eO{BsL=PBHQmY4Qba7&Ll>7;1dL7#}oL(bb|y|lBX)3G49OW7?$Si1wkWR(w& zFw8^@&`$Re2Izuuxa;=^1QvD(`rfFPp^UPH|7ZG}=9Hiit2NZH!Y^0uvk#bF_VXvb z;(z+&XHvD$o$qx<=olr7Q*jjinT*567U)}=KN!`~5G%Hj0yc59dwUF)x2W0D1?Oam z00=_$T62d7OESslV&wP9=t9C_W!MXPvRX4;I%f;fA$X0)*02~1g|fq8L#Obd_g!Cx zmu!!^RCRGamI0MY2e)DlYBzK?!PT0dWwJcQ1fUNHQJGk`W@C*@qm!q4%^KX!0mC#R zFr$S;$Mk93tZ?3W;<;MPudh1{MsM$P)l=$+zbX}JV2n8AT?C?6`7MXNvZwBc@uH%O;zn`aZNG2J0G0$peL1zMi+v)Q3z4DgK7_ZAUh zB4HJN5XaJ@rHvL2+M#WVF?kGNc-TvL93AgOQlwh#{u#x6shk^(*zafR?%XzO4ysWvX>w2t%+?P*-GuO`Bckm5wHM{ex{tPL72fv-Tp5wORoST+-U z7U2o19MQ3+dhg1ENyy-s$Y)P`LAKpiA!FIyx8;{QBVbT}{aP1_S2g@n7Whk_I*g$q zKEQ+F!FYp&#EJZGhkj)z%|zmWsczSKGp&1-HxdINZ=GB+92e(iX>K7G-%bhiTQ(~c z2F?3ay72NBw(>Bq`pjq9Nm^=}nj4|IN!8;YGdT|-Q%C1$9r{tIAnfbwbvUghHdJ(` zN7)NPpVC94s#}pokIKGipzErc@24M2CcOBGaQBW6W0^#@`zmF@SWK0tKq+)eLVU zKU1pTyL}o`QYeRvjs@_D0A%8(x(v*v`+ffZDd;m>kiC*-b!B`Bk-i~dUX@}-c?}Ym zc<3uNvy{rC9FUyMlz6OP+4D-<+0eBsFvga=D>^YXl3{Dq49IEUg*UnjBGbr`_u~vC z95#1`Q^7u{NgvE3nEzxN2`}sUW9%w%-w(hA8;~pL@-5n&(!K9zO>ii$TZeB7A31J* zrSnME08W=@P=q?NpX?skm`W$iU1ys8-;pJcge`-PY=c87=jeV9J6Q=^8{x_)JqrMy zxLoAOsI_>RoX^B8&~dg>6MyOTEU9)W^)YZdp05t>J>o_#pZWQS@swaIEN5htc%6$c zdLSI6J;1w+?O%Cfub+=t%=6 ztM#OKxSZd>srIXpLi=9BYsi14`+bc=r@u_C>C{5AU98lq!Dnf1Q@$mE=kJpw@SMu; zZTSy)7GgCE-23hgBwZLlu01>nxWe)8d;zEWiGD^38KDqz>XLTpU)cECexVSLf5RR6 
zr+h{Zcf^iY;}SJmSK;7dUgzVa$^d8aj+Obq1lblMX}UYE@$2k-w&IV;^zJ}?DIg2^ z+FNO1JUCD4NrqcgRzo@%(AT(KixGdX@!tTo7NEYN8St4B4A&AEXch*mu>|({&Xd}g zm%Z^w6&cYr>Lhci_|4go0uKfQXNo~nitrMrkHBQN==B+-bJW+*2lxT6Fhk==f6E_0 z%HT2mpl+$!^Y;mIiQuKAkMy&+h&;sjih3Wi96fIuI7Avh^X)S_EfP7q?^4Gq zuk$Z7#N=S#S&uQ3#F)p6c03@VxaKa&m}o+K^G*LaoXm%bx_d(qXMAZpWOxVZ00hFZ zTvvE_ROkPpGrGqtc1-@^`8TDq!_Keo=AaN<+PIIOq1mzzKI-=DN6u$g#$RfXf_C4Y zL-&R(;2hToZ3Xy3`cW(_SzfK!|ER!`oS`%)n(+c5OW|)(Q|k0PC6T`<8w&*lN%w7? zh6{K9nM6}zM(c4RQq$0cylxj@J@(+yoK|W~`#c;pD(C$$l#0rb|Iau&XuDf|>z$UD zQ7W|4#zX0+x_8WFdxv<#6 zbi(n7Rb498Q+nUil6LRx=#;#!Jex2My~%d1?T)XWg*>`*#<#B}B|#2~=Ga0o?Vzc= zXB9c<*-u#R^!}R+^mKhdu@uLawUmP;)&Y}h5csLn`4mrrv&m7(PGQlR!XPx>Qg5yU zX&`Ur^QZ5bfeN9v+0zayKWL&DA|4pscV8YpD6YcB3PCUS-gG`S#NZ4H+orJF;ES87 z%PWtTeJVP6zTg2PL&>h9><}99_9#CAIxh4Q2rwx>m!?cuxCu^Kzz$*X+O<`~P!lZr zfC!`mYafJ%YVq=4b$n)KJN6Htej1Xw-nG^aPTh|j`5BrZ>=Q2uTyqnrNN73~K+`8h z&b{Vz>7qqK-s|0OWoipwtijsdTr+8)#F zigEL%>3%>T-rfTX@qFKUIXE;rMF*k1+{WXPm(5`y^PM z3Bc;i00348EoLz?Gk7$?F0({~b%Bs{Mf(l`QY6n~E*py@^3x(0rZVmnT;8Fw&}-U~ zt~Vrq`_queZAgIX9ssLbZu*FVB9yT{WVuCfXpFwba|IBDp?`Q>r!LN}5-=)3>V(!+3B6HZr)^ko2pwhh)X z>Vt2gVJzxnGRS4@${6@0t`;fYpGV zz^*Uu3=g`I%EQFw`LQ}aAl`_w5-fXgv+Yx^9(_fVxbiZEz|d(mW5E$!L@pOlJfsxr zI}nnHne~gqOmh&-BeYmf%>+rgF6!k@qDRtFXDQ7etlXk(Qx4w zu=v<3>7ot|dgx3>U(v%j5#~|5B49x)o|+JrC^(qb&l%uw-+tUM{i?a=2r;xh6+vCS zQkegsyRLGY!sN(U!yPE9uao&N_D-)Z>Ci-#?%wkQ6jw|ktP`C1}ul|$qOeI@P8{3I*p;4DO8W6V- zSpj0Vob-=jnK+;3Co+G*Hx_7FlukRTT+;E3GDzc1zCr@u@7ERf8I12e;%)o`zFir$ zHw1aqmctXa9|8TD26~?icX*eZ9*m9SCI>;yeSQm1lL#CgCRg`QP_DdlV?xb}VSB>; z{D9xavy9@_b(^`6#FjUVDRX5gcq(qN6aI?t5^mIU4L~cilT)wJx%9Z*E?BYugqk$j zFYam54kc1rOyPfk@Wc}k&V}OP#TnwlwdU7sUL5n&-!;p*L3A0&kbC@)#)PmGg{Z}S z!_>(h4G?=Yjx1EDPM|n@D1p!U_~XfYv_V{pg5}?SdOi7SXdxF*3KUyEL4#6Fq*sWZQ<0cEcNSOA!k>ROVSPbYE7M$>(3=() zEi>#7qw@Wxk{zpG!n+6;Wq2---^x#Y0>Kq*HH@DF5d4fI`3{O#=XU8~fhq z#chM2WPyp4ScL3Z^Et-$?7LMcK?l1y>Ij)th?WDIDy`>YJ0$-=EHj`lLjBl1XY!D2 z62n-q>Bbrr15v*P1}=rm=WgAeIP?80;Kdd3=byNkc6r;v@jn1ubDrU-Y4i&F2F|gZ 
z;(nkGk&MjX(!^m+2EHt#(&u_-%VN5v+=&EmSvI2{csSrtr6Wl}GIr~)02{l%$N!-W zoB^+x6(ogX&4{2^$GxfQg-u>>4lnYx=z(#dgYzX>(C8$dG-;*x}0a{ot-FKG{?k^(@%|aJz&urW$U$apzOMtDT1mwXc{IKBz z!Sk*gUe%)rKz+dwPanVKc!w{CSsRBR^Of%4PNXfp9+xMEJ{R zbyu@|Ul=l!pDXsDPvs81rSd}hLEjni+mPZr!FP~svL(;63ufJa8i4napl0rCVLX^0 z*vb!9*3m<4n5TJSpTN49tEi&SNBJMpp>~-0AI4oaEWq3wlT*jlivz3pW4;5J7!rW0 zu5egG=G>G90o^S(tS^07n2E%NCpmL~@Uh|g%e)vbp6~c~c?f`{iv!Px=nK3a+YZ3+ zB!E(@xytaDHt3wew^5matj(N)L`I*vJ{7!br*dR9k#z|tg47iNRm(8I5Cv%0NJt5x zr68b7{j2|7{0d&1C*WgO7{_;ls1SxpSq{VS4r~=#oNha5XAff7h5Y71ashI$guItd z)m^}+reHD%kjnGf!U>eJ>Pu^9z25%?zSF_!n?$96QDyO>9#xc>H0Sj7sg9Ga%m#1z z=MdGuy`m$ES*a$9R2w0>Y}W>m!+OR)G;Q{evk527Ss$1HRqAN&-0F+P7qn0t#nv?I zjJbxVMFR|J^mgbtBV2X8fZv)NlfA(KPBEee0@aPDcg|p7SK0Y}()}X!{-pcGQ);eZ z1V|!hTqwR}v&c~pbd>+J3sH`9y$LA99|POP>Q?wyQRSRUV*6o);x|4b)8w~M|KUf` z=MMj2m3`Fyu*&p*S!J303dp}$<-8MCIrcBBY)@j9rMaGYb~#53k}&XOC*nKas~u#5 zalHXk!=ws<6j}F#eabq7e-NMks8yx+Er)1fGOhi;NcRBcM+(o_3&OjPQdv zcytf8^+g(~WCb9VE4@*`Za!!+lN10Mv~$E}1oV54k9RaEW;A#`x8dC*)DwZEM)JYn zjHYX#@bSD0$SzffyV5M(V30&xc@dWk)MGN=`P9|PE z3|E!_^23y)oT&?T?C%rjl=O~^OMihcwv7=~>jRRv3^^)ARUQa{XhBe;g|$mizvHww z^GCPt)p2Iw9|HKd%Oaq1RY(T74>*RM6q-T@ZDk26TyVcL#n<8gX(acm%Hl?zz+c8| zZg-HLr!y#H09tvCL@Vo)Xk{*+ugD8-U43b$qE}w0WXQUH93pT0TU_y%QU3ieMp>{I zK^H{T-idukVw6+=Fv{dR|1iq%e>2LuvHz1%o^M}$d6S|UT#Ktz!<3J}eeEoXm!gaD zFL?@xPxmCQg98L2CK^HvqQPepkCt8_%yz+=b?k1({rbL*TkniHzWqF(LrUpZP#ppS z-G7+<9tcQz_Nar0Le(Ir!kp$4?XLQMvq#X4%heN)sOmz(pR_nkaOP5U^o4}BCo@|9 z33}gtkt)9@C^;c&fd(prCJ8{FrM>Dx#+{mpj@K6YCG0~}!7=*xuZNoE%Ia%5h&gPA z$w4oz87S^I@j~q6g^Jg>JaY#2hsyBi?%(s>qmORz6MmtQb7;F_@cv*k;W~TULA6u* zkj>ziwlR5;D^?8_nV$3Xl}Dq~flMEXdIOE`_-DaRI9qq78t?C%^N+oyfLGKnp7J8N z;h2Nezfrjz95FF!JIRpZApKeMr6jmuNJUiG!P3qt4d9cZ0amVY5P6oW;w)0?HmNcV z+ftc!l4zWXiZnFaM{vID09V@;=JR*ENGMS{%-V?50%7xS<+fBZJCy`|bH$xd8tEo6 zZe$K$NQ7y4JGBh`S3!aDuQ-;xnRhw3%5uoPVPFU)x1dJ6Y+xx~6DVB{@f&@N;dEQVdp!7xh%R~0($964iW&qj#qi@)TruxbE+i;pdWPo>h}FW zjsr!qTh}$d;?{F2A(%A)+4J5MDr(;Dz45{yui=PTI29fteh08dkmMXsI!#*o5{W+Z 
z&H_ZAN$9ZX`eglIdIFIY{R0M;`vZSK)|g?bOC2AbfeC$`ED%A_z` zucV{7@mH<`SMf&gzh%I|hR;wHEJLCphL47e=@w_7q|uqoPDx>^lzR?+I3(4@S;{zG zSQ2)w+_G*NX1-?;P-;4S4ij~w@Rqu~6u85!YcBB3s-abB2WTM+h1%U-a0W>5{y-4d z#5XAa1%vxev^)azf}48s@RHE@NB^Xg_TLciJ5A2h^gNo>KNqT?Yjd}p-l48X;NwT< z47>B*0t(%X4n6Ha?B}f=>Sa#$PrQvL!-KqRR}Q%!BUoN*5;&Lyg<)Ypq~k=}n}g%q zE-F~66U^EwL9(Hw(VoCnw-dO!Ji?hK{%^im5XCUuG&;dnfGf-#xWbH)n!u1xu*LWB z#`XukmQZyjS1)3cIJ~^ZYGeW20G3Yu6NGv-_%xs>NumE&^3DG?C~9pJ)V4Zn#fP`B z#BIB7vt`v&VS8&$tu=+iUm=WqzrA=ubeoS^sbNR(xkNn25M`pUpyXA)8SR=q_JBo5 z%hD5q3{@R1U$Mj3)O>*?6x~_5i{`tD#4b!biI)aSTsFaUnAc5+f!WY}1;huApl>nx zcxR4~T=>1QIbnp=4@d-}b$U^#ODn7cI9oaqwrrqnr?*Bien_x}nkju)M02sfOMlPmEgHrR@ykrd1v{HZWDTrT;HMOf#Q70r&q5&_|;Xmk&@?y-9>E zcC8o~)QOoJeHVR8i%ini!<79HxMY}ouexb*CGWp+!o&wYZ(R?e^Pj+rGA~e8UN)v{ z(H{e^BlZq{wBX|&9UFm4Hk944ALDUeoOc4~R~mpanJC|PFhE@@t=%M3$i()udE;@5Np;cnkLtp;koBp#12XSn-$-+V;|D)6G%!DL zZP-J|B@RqUea8!w03*V!klbe*no3g>mE{NFYR=HOV|pphzZ~(;-AY^qzaTNt=!TA*icb63e@ssF4YA-@-fpAv z4*3lTF4{?gi)}z~QA844biztWf(yH^H!K0cMZ*_`F>q=k^&z`3;ZHZ(BDFHhNw&Jz zvRkeNpqS`6b$?V=Uv77N@Tv&Ux!?XGR5F9{qDpSKp&d$FS$?)WyPt(rPbcFV&eZpw zM3%dBXS8APYAar`u$gf&p_t&X4D!#+&2VWWWyDVeXO zaC8~FdO9{2epXip7uSVQ#(0pnA7x2Ey1U#ZeB61p4Mf`Bo@8dgDp0597(Pr~me+hp zk^HjjtTSK1GCG+8c^Y+b6miWYJ_{xEt|e`)A8>>fH~^ClTbd`qB8rkYtx0cOxB^LY z(e*FQMSqKe^3>8{3ZlB06 z>iG=yWyf~_vo+g9gsD$_e4rs_gjT!Ts01<)GN$MX?2S)vlbM%H_@chCi*46xFZ1To zd}8>cyFmO4B^N!2A)(|0(7$vSR-#V1z_u@mC_&nQ`MtsBdpm3($O~4Sl?YqHI>tAF z5rXxC`u(vF3lD8&-pMRVF@J;iqQy>_FHkDBpI?%rC3QCq}NHE`6o2q`@1UWh4np}(uFTsPwS)&svAe> z#Cf8D{XytIJh1jiDiPp;3uxd5Sb);y4YndC@g)U!)+GCtuF&Y4GJ`8S|A0q%XFyTH5yaa@qJ*na^OmRRZ zwFROyFYOlvQoIe@xZSIg^A8oQ{f7!3f6jA2GC8sQp@K_E)gkV-+w;1|m*x)^Qg~7c zc5z)j+9_X`zF(1eDxqoE!Y~eky_3CcAG`U6?xU4h51<*pt3b1*7Vuzn(kY}cc=9Ij z5*f`shXFSDBawv!o^2hvXPii6u=8Is`14nt2=kRl6DbDrq2Yj`f5_k>+4hQIZR0@u zUakk7Q1xR&72u^KmDBgMpI*P;w3e?-Ec4RbH{(K*!tCRdZ~X+t7U=y}%bYlc*ok4TTq14JdcFOS0)qcK(B>B>+5a90K5J0WtH6 zS1%&D%H)lNRzqL6#(O4SN9x3sV{Kej@!jEn0d6}adhg&)CcaNS6zAM`fCw)Yb+hL% 
z_Id7_g!c2ilOrqjxAZUQIZ1&WLz61FN~&8SquAJyS1H9vz!$9R8uA2x7n6w?mfqne z`ovT>3&g0pZg-kY4f4_gOmpAA#2)q#h6!R#)dDfriAe6SWSH}_%kj2SpkNsexCAUt zpJ*^5Pc#@e!xW*nWSLE_W2+Px(0QDOT}Pp>x4*wpl_9b^2FHSiwflQB{9m3uw*fVe zAzs;&C{_6}Hub2;=A^)-hg73QC&g^>N4`u(M-G&_0#9*r{O>rByA8*nT7lg`9mE<@ z*##o)(x6}W&tJd4d4g}hyz;9b*==}PF;s)0?kx&baJ_a8vo2PT-W^isJquBl=Z(^2_nQn)dVg*V|*MyKo zHB~_Cnf}V5NYB9O1VhjZ!@m!=?4RauYn9Zpg^zfD#yZ71Z=)-MP&Y}7DU{i z8)hU_2*YkF-krVe)&;&-r|~r;I4KYxTcDdf(mCqW^{)KO+h)GSYJipVh+AA~V#nVb zA|-^72J(+S?GNS-_^{0UFymJK=UgA3G;i%)*Qb(Pe%QSrF6>5;@<-{)umF_ae%@+s z7A^0TBT;U|yS+$KMn>){wd%juNIj>*pBh=w4WLGr?!Rhe ztv}`5>#g55>~B%=IzbpPvQEZj+5+a%?8|4B`Aj_X?!!&x;OpL3C|_)iz&uMVgPT01A>km}*9HgPYGXK8BTB$3IL zl~X48(#HHHt|$R2yIO9>v*te_=j3fFL=j0CvAJwnmkJEFNfCW5oN9dp5WR*-g)M~2a- zwTWw5gl^vy7~;J1X;r@IgK};fQ?N1fmA@n^*Kl!^c+Dw$*zLWVIT($9$N)AJts$+7O-{>KR+1hFmZ}o zjf@0bIG)aopJ46O$3QXcs&5DLVTvH-TtRIJPx$Z92=w9;3$@vrMzA92eq-+2BN(yHB#6!F650AWmmU zXUZs3qB5UP|G4J@_y$P1VjK;*FY#5+}xU@VT~3r;D?! zvg-&6)u?})!hxR4ll2BxkU`v!wK4il^UrG}3kp7+&9)3!6BOS~eZBa-{qGZzk-ZoC z=PqB*NpAAs6j|6s62bEK_}@&*`zZ~OalC`GY^vy}PAhzuYoHIK#>S3KrI{apD)=@?x*&o1wOJgv!`NE6Wt47c6 zUIy+-LA(Pe2Jj%epE4M|n-?qiKtodv-LXdDlpGmAXCrqfQ6_1noUnKseNrW^DqaGv zWZhs#)Oc6|zL46wV3?nHxsQ-*OV!BA+8)|{tP5(+0W;Y7?`u?)#KQG}bj4FGonZo0 zyN?qBTaMzde4bjkCqYJb_aPw18wiNlOjOCP`=7>dDy)FxBC~D(YnPmta(QCs#AnjO z7nFZ)9zy}~B635??TDR|wzen0S{KTC!7W;MW(>zSp9m0(xgR>cv`sX(pE1&L^#&Rx3;A&ts6 z3?Q|;v@SeVZ#3M}bO2SE76U8-8?HoxZfB>m2GC|AF4Cx(>cJXXBfYT zj$vPq?4SBvS(?$7FH>p%>~dHkZm6@e7Jx+cvsplix_3g6bd{o^r8J`ZL~{*9wM;#5 zU-EjNs71lzarONshc@imrPnq2v8Tc9O_NfPPJ|W?1mIw-x2cdaqIxkGcBfu)2k80D#QHOpQ z>ABRlfBxrVJ> `p2&G#E6{aPsimTtRn4T@Sd|p8wVDeL4+)r7gyNyfNM&&=LJD zHc?*K$*ZMar2O3-=}cLhf4jdoPmC%hCq|W@Y-J;0_2U>$g58dKZVRgId*Co1wDK@@ zXOf*YD>eF+V$~h|tMYB-oaF8NT(}b^L1<`+l+tUdat`!-T%IRHvTzQN-jct6IkVg_%&B_UkmzUFcCUovh1Sn!z{SqoO6)k<0CTr zBg7yBSt?XAulaXpgjK~+#Hnt84e{S0u34Ah2h2q z4d3`Hq5Jd*;vJ|_Ua(BX9%B=U+_<_E*-uLTI@VZ;WEOqlu-}&%`15Hvgt}OeGYphuSWFAvBiLe? 
zzT$PN_JQg;QQqHA1rlj?z>#=W&46${gg?)Ig)@*T{o@zW!!1FZ=__Hz!bhN>_eT?T zPs6gn!$!00sW+=*u-{nw~}P3!GSV7RCA*};09=fho~ zJ#SNCPxYQ~+P(otl?W2BOUTxH-%(M9f03TL2&hz=cFRKgywg$-lTBos6)coJBr!J4?#%8@b=odpdRSOoE8ZE1x@?JyZ;!!=l^5;&dR2I_y1x1wm$7ZGpzL1_gXpM z)S_)j6cDOBtj*-Gqn`8U+bBppy$VyuJS64az7{!tsBu3_pniW5W`)%{L}p4o$$58n z-pcCh3gCF&)$_tIUDl~H8HaA4(R(82z3KXk!e^mrc(QJsKPxa#xBj>o4OB2aJ5LJ( z$L4ptA1#jQKZ$v!Yq=hQ1RUST^p)x%ItnN6RXC83Q%dn>P$wc(UaevFm1zRAuAinc zCl}*o-oBM^7y29gZ~nTz1(?>&`*V;i1b3gR%O4km69w7-WSM1Iv2cmFh;`eK#7!xN z;6`6xi}yymMnG}tyJU9kH;SFJOx|8YeG|*>N_-Nss~Nc#qbdj;P?C4ftaOYXiJjt52YIux4#kXJg~B2M?AY2gD?=+ za9wrm4MJEc@htC;=n>Aji&sa5LFauY;ZyrVN{U@99d)MuQhI^z8B zKgbSRv0>x-7 zvYL0H97tZ<&h)U)l*{KVB7AG&{Rn6o_Bk3ZK{+9IAF46o(`U+$6DPo|EN>=e`gp%A z3X0F2`*^`*VSfI`L>5)ecxf(N0V94U73VNEm~XL*+BfU_d4rous)W6K1Z2> zarwYI$T?Lf6C*+h`$pW>*`bY9m%nmt68>yz&=7j;=m%{+eZPJ6^)Qx=8+o}=@&XVK zdsvgEgmZSPjzKjR5JxvuH3>XN!e)8n>81C|I+4r9{42A3T?ilq50Kcw0Et~e5AI9o zqAY7fu+QuVd73T|(5?MpbyXHch`xCX65PnPoU*JCwtU*lcACk~)+Y8>&nc&yS;|9v z&(vFjDnHD_FN`(9GFY=76&LYW5^_l;gEQbHWh@A#oK#LMKK13k`1*Xduf7rjBLDp8 z3Q_3H&JdLw2K~JS+FT4dRw~u{g0)r0#AFl_^}}m>8RoQrA-E@If$RI+&REk`NKD=O zhPt#OUME_9yFWQcLOdYLd0rn`AOd$jKRAEm;uALcxLZ&ep`E@sgmGsj)=}&@+7@$Z z<2EQUxy*k=7)WaspiRPl%*R*MLej!Br-9MwIuSWj-7@A1bVz{p;_(2cd=_kbm##aF zbLn)jgy8dAPeT-HESgoejmUysk1p}hoDq2yq@#X@rX80Qc-u?4QAHtw@=Vw(vgc*O z->wFc-4SJbQ6F(}tQ3GjXockS!s?Az-=AJT&8tdZW{ZYYZkB8gndwiD1Zcs;ko5h@ zQu*4R`uH!ahWZGs{=QJH|2n$(N;RRB6AkYWaz37W252_hJ35( z@{kzB3p-C-9dCNi_`L}HZ8SZ!bRKOexFLj;(1xP0@S#akWSow$MLpR*-_+r?DxI%| zPw(|8d7mA-DQ51{3HZ**%DMZYz#l2729u2CzfYNZ{Mn4!(rwoRKmT(mYy_YB5BMkh zPDM$>w_snPu{Dy^j@&2S_62$S{Z2t><>I@N5N6k>kZJS#UKra^?BAf%5RujJ1q4iVZBz@_)Xr5c;J`*tMr^!>}o?aWg)=(A?}$iGjY zqPk1N+@*>$bn@$sPu-_-b@IPk<@PC01^+`!b>&Rd!nXJht?=IH0%w-y@d#;N0vtilhh)j`AKFQew(!z2 zmY6>;E~T7drAm7He7so-r|EW@oA<_m7)$M$SByr10ckTPx)8B$AzZ_i5CM-{N19yz zxbXp$bjQ-p?%wLkgFIy2>9jPk)-C_6Yi4uNW6FMhZx`7YcN@3gBfg-`e3A~XqY}_* zW(4D^VGx6&_kQe|;_I~p$4PWiv#QXVj$CXH{Yjv~6gez}* 
zqw2KbjyjOPTtSM~qso|V{iA{gDd}W;G>Q^-jXkGbS(#aVSVh1E4sPS^nWTh=A&jTW zRHvA>&rR=LW1G(1EE8Cn6%nb8bE0eao2x~iA5iIS34vN-X&m+Yv_lcTR(xrz)PvBp z_fGM4a^uB;zsV$fBd4JHC8Ar>TH;Xt_-|_Rr+f)l!orq^zqdcA=POO}8g>Q))ZTGru=xun^DVgH6P^?d7h261&UkD+_fN1;sVeED{jSYXyoI&5h> zgEHZtUKrk0+^Yp~@+w)nt#6uL`0BxNfa9-u@|5i@&82FpBkHy5HqOz7>hlhbFB8qr z^Ge-_EbFkkn>l&AnOCXOO_QT76iE*Q(L=+WzJ(7PX~z)ES;ouLp1b?<_qWcAGgcjX zIX-<6K_Y!RXup5#o)q<8b+*R9h-Q;t*xh48O*QN5KMCh%m&-% zBAvbeyflR_UCKx~551xq-Vzsh^%nk&8F_gW{-aU*k5`}Wl5>CR{~A7ct>FI5G!>BBgp~}%`sEKzYw#Mh<=X1H0{>%*v!c}=~BeYRP zM~ql0<}min?!yWi_)(!S#7@U3V-nZYd0A6_OxsVr5T^e~55HT_5AJ=T{Yeba@2IZ6edheGDFIvS;OBnk;j$VJzraM^=&DR!(WTTM z0rP2~<0!SKo0M6kDuxV_aL1v5oQZS{zsmEq*cLp)y{nAAU8BcG=Z_Y%ns!H4DtbK{ z)fHOb-CA&45vH$eu>i+K@XFx*>71t)vfcMvPo*qN6?LxAP5Wo~-8Rj%vSYu{yRCfD z_$ayU%{k;z>Z7_T{-#>y5}}@!iipdxh^WHzd~Yqi3B z=h#g|iHdldvorJyY_M>(Y7WwtMmnLj!WIzn|R=pH}7z z8($EufreJlftU&>oK(=Uqgp9b9&&9wI2)(W zTX4btOtYW3lZ#2lG7k3Zg8JagRB$HKE92`OkZq@v-Qt|I>wzXaqP@{#9bV zVRK6b%d}VsoU*G@YdOsD)yP>%j&{kM$9haU#2O>SCiy0iG0?&f?%GSRrt-du~=_?Ax^@vHC;wR~oaEAtK z!3{>W`+Xp9i<5sM>%RspPk)Y}YZj--gMJ15E8^s{+--}$zXhKCpgw5!ESOuN=6)>1 z8X+$lh2kD2HhHa-V3S#q+t-^Q$}5D4!I4mZ&6P6ieG!Iz=^>T`d;Q_sx?zEUlKr+c zw_U?Ptbh0t9J68%thVCSgwev<@s+)UAyd;`VW?pnp-gJN4Z8@u+|E2X8fM&`=wLLw zhT(yYJMp;uu5WZ6Wo4l=fZ}p#Cyf2T)#Wg{{lhQ9DX-nJCh>K71EH9C>-!xw*;3#`KJntYKUE6<#u*k(z8+l-I0N`@nnJ}#TcQ)G^cz}hy$e(HGT+8UWR zU+G=nwD%>~f^x4GiTbX5Pcp(oS3D<*>Q+=XXAlp>k9ao>U~Bh*gZho?i*WewD_yIeh5?!*C!|DXm8hN2yx<58mHXL2g?TZu`8o z9SYp1AhY=XpS3UEKyl6PTHa@`Yx$1!LW9Haus6$lDGe`2yCBtd6Jzg5xacmx%V`>+ zX*1_x8%i&6r$o+|ZxHsXuo7G^!7bia=V4JM1jm)6G>yGf2YWA$LhoyfOMEB+;eZ(H zxM;~l9HvXP&&pKdIn(1L^tZ0Q`v!zp-#qj%*Er!BA*G$cS zUYkR!FZLNg-{pbj0szj4;4(F#s*98&VZ6e@mh>ANoGE_EFOiP}DNvPMa%cAU z+L3wjPu1s=`g(@xYs{y;B_n09W>zeNL(V>RjgRY2+vDlZE?kao!ugw344`J9;-T83 zX=yWoUw44znV?%9{ia5*?pqi5{6`@Zd>`*wxV&4{ElWGwA}Ef!k$HD3txmwh$4-WA z`XclF;UmA5$ayjUrf++~7t&Y*VCItt7H<+%$97wiihd!Y?a0Plau9@O}r>(!?gsfwCcNe z+^goQr?|Iv)=c}n*X0eS~)aNO*U5F76}NlH40_I*t~u+x)(v` 
z<@^5qP0Y@k(wFna@goI7=?1?zJv*E?(B~Srk4IOQ?`Ju_9$k6n<$}Bga=wKTPzLTj zGAoNgHAzKKA)(KCSo=5_?IYmhNwa!wPU!wE86k7fUN2Qu?H$VNOBc4U6h<$9e-m-j z(Dfx7UkW&9?`<*Ocu)ELUh1GtPRoAV_khcBh{@gkJwZD9fdcyhj)4?t=nVohB~#!d z0(BRB>nQzG`zH<$AT2WKmeEOs1xsky%u!o0OZ9)Z#Rod z$di$&#Xp;6gC^ju9E$j7%|*YlLUJ$aYYCw%pE^L&$5!E6NA>OK>nRzWtLIn6B?+c7 zirkwVwz1>#sWW&oqx% zwzk-RUs+dwL1uM+tCe}pKbuZO8*5BeE#4~~_N{=t@~q?b$K-1%FyXEjMSN$6)VWhD zh8(Jtp2bCjD{u*%uw>lsF!{E5SI0c^6?YXF667S0H0YT)SWuoS&EhFb2KZv3cl(=c4K|P=8xa3b(WpBV3K8$ zhS_iVnMJ}dJfqAPshvSX&2av@@n^#;wuu4{_H_rUW%RmAjUe8K$UO zkygP%rSX1jZs|;?Ox2zsdlC0XtJT}(I(!!oZQcy<(ForfBfY5*%DlZoI)lUKYE~}^ zWNE$^K>Tl*VMqbAMA_vc=Jr$V6&&)Z{xz=ckaOq)x(J-C0;? z3?=TvLEXOyrW5(+EB34h=dkk=^3+|3HwR04h_7>o%g0Cba#Q2W56#MP*LEv^5O1;N zx~5pT`tz0f-m(Q&iOVI#$V;nU0>||74Y4!XtAP`%{`r+42J;<5B>3U$(;#xuctt;F z9WCp2gxEKNwAJrtsjuM<`R1@wb?l;r zZ?kyk{&uWh(;XgfSaCWkMEKS6Fgd0w7g=FrP_=b#D|O)tmn!BKn~tY?Nn?mm9>v0= zH|w|rZdhl>2Ur4bzw)dlqtnoXz%&bcXbHAKX?E|%69eeANmWj|VQ=}KO|5M3rMk_*E1(NYzL7)LoBLg~ z6LN4BcHSAZcBaDdSG_9Y%OGdv2$b36*UT%|Jn7$C;5LS#*iiTD;)-8n1T`FL)ZUpHVX0=hl3{BJh!f>|qYYE{QR3wfz= zjM0Yl6BDrS0S_ZrydFXf@Gm^^uC^d~w3id~q7q@&fT9wMqGOE_D&2OlymSE8U(#MS z{Z(1^^1EM~4~tb?)Yz2{gEkmP36ib-BcXR4eEONMm51-K$Q&!w9jodTKWk27n6!Aw zTo!|J=7tB56=VP5to5bogVOb0dQ2>w@Zk`_KTl1|U-LToDO+=l2kUxZNN8fK zorEeC+b1$X8uH-lvZldhEr>8Jx^KPiPiEF&vdSi5(P2w&Z?|QRG8Jy(8p$-T->R?n z`m7Jy_jg^{F=?#6kgLsR%h)-ZNVv>o^3pnO@D{X-Dzia~zhY?%K(c*Tk}6I=Pt+y@ zecnc3%#?dD0*3NYW2-RQYFsy9WR4YD=d9VX z_upL_BxK=Gqr%U#4mgE9Eea*t;o{ID=Xs$vt0qO{6%?PpxSH}i)N^0X1 z5suG^u|*IK`rRg6iaz49Jo=86Gx)jx{Naf6d%V>js5}>H|2_ie_$(<}E&$z4yN~}) zAzdz@jrZ(EP$|hjK_(kQk3#ADY%Yd|SO~@R^~X*ljz=M63cJQ)M~&@qAShHR;7lRI z%&AU>y8YLYFUTRW7fy&BCFB3ZjdljIyGot;O;lkYE~3~6Nr9r`b*L()F`Vf_^e zUvfuwj#s@*u!h>iSS{erVUah7g&Y(7Jii4_Q>}u@6vXWQ@H>760Iar*wtFuZ?|3e)iF!ultWCMtk;Pm|-!+z#Lcgy<$ zbKA<5w{1Nr(Bu~1^}YwH5(j>?ClIB^Cdo%}BTRLw0Ju_z z${CnHF$_fI08uFDHhPVocgk#eEDLw~3Kdexk-N8s5)pEb`61UOV|fV^-5u~DzuCf| zNd%sE7TzN^LOY4~c$XLr_n(CO7oky}r4QxwI(8!}`z-g1J>yen4n~EUK~?=facZ)2 
zk_XXbL0UPEG|VCtB_%a%9@Vr;(&tE&(zgaAH{u_lJ`wR!8`{h${c__BEp3ff>WJjU z)tPO%Si?XPg><;uFv3b@>$Lszfyp@4*Y8h=V{`DRz-O-qzNsIt=on0wP79v};*9R0 z_j9Ma^6ggOc9@U9zcw6kr1o9~U8cfbCCeb;=c@uYam_Y&c4iHydBG|Gaif0AQ8-s~ z#z3;#O9fSN*EcPNVA#dIGfc06PbT`W_WjhigP-Io7MUfc_F#ua8m&yYPx zv8e|HtV=(&`r8*S{`^vRGc}nJB9?gO4~b>m#!TQ{dB8t!!NG+dxo#WH%6u^S zta);~_MD76#+9Y3Eg%S`N8o+6y~>^6z+qbgUc_PYv}eoEHgIqU&Tc=tYp%6=7S9i& zK*y-LVki%bux%=%YaUM@*Ggy;!J+A`tv}Nk5pn@S``NnkQ5(wm*0GmxU=Pq?q0;#i z8HrX)a^?Z;$Hu)V?W5j$TLyi&h_F3wPlRd^wW151xgD_M!%&y6>05g8I7wdo)0@N# z+5VoVjU|7blJoz%Ilh{t7FdD1my-lj_cp~YtRMp$}wcJ;8_%jeq=Xp7CtkkmjCnB9wa}T z)%wY9Rhc_V-VNxYnClz!{A5ee;kSwNKfQq7*5ih=5X+PDtoT6G17W5IQO#RXPbx>V#0Fmr$j* zP!m%2h~k{<`_|g~d}m+hy3Y6$Ll_xxjQ4r(=Xc+a-;+-1uQ2wo{XDrdAE4#Cyg3Us z2{E5Z*W;1(>7+yrkFrr{!{&qG$0`D)n&Rjd72-=zi0nSpHAoO7tB zj2~}V=f6C~x*p0u*gM%V&=AF-PL=BAwO-x5zJGjaBRf`S%wB4N_PC?Gu71Fw*$|u6 z^(%lhv=CDbPQRYavNW^#qF-)qLSc(CpAV>`d7r#0v1S>8h<-)P=goP;uD#p;LEz-l zQpjykdX0OhmQUQ_%_*i~x4-5{k@(@ovYrRQO}WEuEVM=0R-9rwXwb0?-hG1VO>h5} z?3+h0gB&}IGDK;N55%^|aCr>D;6RbZFtuGUGm-v*DfCmJGR?amib5@hppl|M@bkl> zs*`yhoE^6`2MIsl#UP2O!9uAhg1-HlfEFSeC;i$C^x=?cZ2Nri4(PHe4kilk@rsr5 zzBUe$*>fJm8;|Ne^{P0~*=3)lQk%vk8tp!}e5{~o7ei#B=rO-S)xsmose7$V=$svH zQZH@qlaBCQnYdtg{Yg3P9E}A#HI6&Csltrb+|TpfJt6n*1{Mf5ueV8)Ydi&CUCYP@&1)fAQF zl0w#ur;e}wmCwX?{)XHg`A9m+8drbZ9K#(kN0F|k3og!es=ja4Se5IRx%+mMEq##|eTIX^`ag0Di!DZI*R^JtDzos*P@iBkUD}u;jj;cNL?>Melvn z1l>i;3x?iz<+6U5C^`gs1{Frwt^1y0pQ(Mt$|bewa>FA20RjSElQH{gE(fWHmkcye z^JcYq754YC8W<=By(^(!lYKM*KliXNw6Z~h!!*>z`INg|qI8FZ#SUw=!|0>|6?QQ6 z9d2bzvH99TPV8Tb4Iaq;PQg;g_#cY-n1SB~ebMp)o}j*p)oIMB2|lde!(Bm0TI+q)V9?j%u`WfP{DiLPwEBBkNcvI+Z$e0 z6l`HNO3Pn&SFgFl;2$7v2V)mA>dGME`Gxn)0QFNyxVwlFb&L*0&EphQfM3!JVv0Dp zRzHw^OV88B7!0YbcDc~XceOaM!LY^(Q&>%`yq9kBX*g3}*Y=wVT9xo)?@A&S_?rQ# z<%&9D1SiNRetbcOo1doqU~MTa~>l$3xVXZW_63Wna4W>nU zsCFpcE{(;{3*_^{V@Y(tGrSLG1Q)w%4ouM8Ax-3Ju98sVRvLXPFb|y8BHCfC^<)dp zxhN(`dmp%95Jfq#R|sr3$q|7gX>(H@uE}@Zd-rVj8EtmiXz!YxqC5CWEOmPaC|w-{ 
zYIY?L8+crbD#KsK9M?B#g}b`h4it9OF04bQ*DBj=tz@eO8JlV4-YdAhNOE7FCP&qou40Ez&@w57*`@+ng31z{FWHI86ZRY(Bv)$TqwtV zvt?(Oc}HGjntJM_4e2~c$@L07&2s!|(xD(}&oV*Gm&gcR%)KBU}zYtJ!VN5h3NN1z7VN#aQK~mz178VeT1U<{gh44$iuTsgs^dui2t4NA; zyjg8aTfx?;|9Q8mSV^u?&dxi~_mYiqCa$S2zuT#DGG{%#_9NrzH4U#+UKq9Rrb-&_JsD|SktUhv}j$!UId%Ghtx=M@z{#X@ygz(a+HN(34TJHaD$Sd?7G zGCeW&#Vf1pXW~kI!%RF*a39ge3rp zPn%4``v(}Tw8}0nJeoir>v>Pslr=gxqj1gBVGjyImBY)LZ3mTzKi_}+n$-3E5}}T@ z&|)Z`xY^Likg*}@7^xoqYTyDi&=3F3Bd@~m#h1PMO18-xwr)tX@J#8v9;6kF zOxR`gf_AI?vSI>nZL$+2DCUAO;IxEwf$fau{tSn)!eT( zoJW6|qJSFs1p|iVZ#KVlr(Vn9?TQ*((3WB4#3Q)18$9Q8Llr&MQ6D62U<#1O^F0ps zBC;uEt6v1_DohnJDdNm?CNzTr=ej6QhQ8TC(;=RA z(jHkw@?%xwI^T%C7|nh*1T^FxsCFimxpZ63evX(Y3BU#`GQIe8ysd#zo!(y0WGZKJ zEjptBNW3H;{@Ucuz{!{}>FRKtl!M^%t#dxSe<{3U@pcM6Nq^>CF{?m(Uv}Vvx@mf+ z*4Wab(vzNH1(%QIm*Ba+Hg1mT{987ZJf>M!gQkNzE%t~HaZ+TP4{#`T`UCB33-!8r z6-0ZGS^w;EXfU@-Gxl@?L4H9ae>~$>a=QLd*#a045m(2?s?*b-N0zTyGo_r1F87w{ z-<37vP|%k4Hch4p@BYaUq9gG8ZT)GWb!9J=!0@$_-;%zIwAD#x zTIgh`kB1kl_`PA~&?K$0L=b59TvQFy!kLz%h#aN!zs_wM=vOI8mhsDoZ_}RF^0H`N zBG)kkq|+lN5_457xopXsdol4Rk|$)8{Fbe`DtNnl%Eb$h&ZC-wikhOnkw^AcE&Os$ zkO+^h-O8rxnz%NRI&Lbpw%PK-D1XYwE`sgSm6WvDDbM7vmi6-;&sBWYzB*pX=10wP zOdJQ^;n(3#Re1jdMLv&k;eFOZh@Y3tUg+6cHP&DM=I=uR+)4Fc8oUtSF{ioS^Jcs} zg#s8C@}Z}G*RV;GB%or}?RVNeLIcvw|23NPS%5vAwEUm# zD1a{b{@oiox+&)@-Xr#HX_h=mt7j(eqx~Ze68G_QtK0}{tf4HvQ5mPVDvK9e7J5{e ziE__eUlO7;0DHg3uhRi;Dcl#Xz7AeVJtE?vDRf`7m-q9+_DT$KsHFNnOG%&M3MMa- zxSX~HOtAAco9_?A1#Ri8U*9d@TE+}$Yp&&q&~q5aZ*-GzY=rd+;v+w+Pu+F*#UD^o zryv#=chSeEcWlE0`YINE_vZvtsTFFy)HZ#?HjmwGJNfkn_q*$Uw31d9RyTjde0Hu= z;Ux`JERu+G=TfjLyYtUlAHg2A^CB*H%)wx5f;)nz>K}{aJfis7b%1kD<8*UI?THYYI-to4u@8+z}2jTjGN2u%v*#u{xvBblc zDB_3I1JyXPkEiewqZSJ%vyhqaUQjOmYUn`y0&of@tJ3U?OjhR!8~ujP=6{`IehtxQ zE8%nh%9w z;A^5t-~AZ&ws)5VRG^*#_wKijpY*t+2vg(qxD1TAbNAbA@>kcYVk{x9b~ z6!yNtfgeQE@k*y|K7tc?nRQ_MA9}CddrkRpGiuc(I#1i%lz6aR<*pYaXumegRzGF_ zB`S(XbZh6ksbiJvnn_~gVHhc-5J09svFeWb-SNOZ8CRC!gv~C0<{^RzyLvw=PM71y z8f~WyK=hc>Gjm|SIY|!5wcXao#7R|qjRd`Q9Ov5@6YPf-F+n*$;_xo`DY14;{kdx% 
zDF-tzIS7W{>|d+;wiF2haU!)2xA;8|9qb8O+^`=Nei|j6L*EF)JBe4xuSI7^YaVRx zpo3AOGEkr_z>JI#z%Iz^_0HBye$klO#7L;CCj4TAfHT3BKM`V$@)Sa9N9@3> zp{zVU;Rk()&WkWC+jG*==B7Pz9vkiE<$*s(j1ngFyn+~fy9HeIhD$Mb z%|%{j@9QB)ihKkOHeYzz9Y-Y*g`_s`PIr@J)nPTeU7I8Lj+L2C?p9t?;x(P^O;TCR zV$DLMPooeCes2e2N#D=Sym9;tk7(?nnDZ1mi_SN*Hqz(Q&61nErfJxq2Wq5)A$I#s zIN}aW`amezaBFPcq`&pebl^caaS!4K;v|xY1iIzd&3h0NFP1%=Wg=Xx95;Ib3??ps zVfyc`!-%BAL$Mw-RA5r`{j%VY7Vm0!pQvdS3kKNmRuS*+?A#0%qY5IH&KE}Q@gG<* z`8^_x37hU@ijeftxEuo*@(#)Ro?@Qy*t@%0V18`P5{>@h)u0RlO=1eiLq8z$xL?l> zCVuicM^?g}*@MiookxApzvfqD5#mc??1VRu5LS?+X?M&jEJ5~%C%*3^apC-hZweZ1 z3qX{dG!Lu$xO*c$1GFSgFB3|dd07@iFaSfCU%oD^ zOYT$=tATXdlZ=PQoZk~7A#8usIbl*eVJ z6S-R8qVvyv5e0s%cR0l_bVft(7@S2Po;Kxxyx1i93FL;{eYk0ZTNqn~b;+D|yI0ac z3=qC2#Z219wm4p_4eTsy4|Ezw<5*>zxmuRAJ5v}EH!lkrf2i5?+k9C_iGJJQx3kR$ zj6GLHML6Fg-R0e+_T_sfEnY40vZi4@$X*>0*vPK_^3A%YGhfF~m2{#Xz*v27+9^=ilHEiRm`z#Co064Bud47m zN!zFjsUpk-%becVq8bNZ;m8O5iH5}2xP~9laq&d(-gXxtgXz+L8Rm0MGEZ~8*@lCG zG<`I1u?oa1xBP4>;Bh;!a^j1AruXxCoQ?;oDxxm!vhc{ih*|l>R&8y?>sYB4cykWC zldp#K>@{(_05mc@x3Y&{#gtmFgB<5Oe=^X3d)OSarF|aqI6X;l9+(C8z#;{(-YE9h zE%6k&dykBGWquTjA#S5t)WY`{H(@nO?`|0Ul?jSNd2+&@TL1d~)H%fww{0KheggNZ zj_yN`8CZsmz+v)3c2nYwLO1H}s+V8p)-7p(h9LI7Ro;nXF^UO}-@ZHU!&Og5G|t=) z<@7yJGH@;{AQY#>GoC{;Rl_6F6g;hIbvJ z;!kh7IEO=}hE+^{sB20yJY#bb%U>$xYvjIcK>m^rAAG}ewBTliz`d@nLDQ(E=ZbIR z@^S6=ouHPu7gcS>%g!wAKfsFEPJQT1ZJ42bDnV+XtC;!HJLGlJM45ZuWNsVr%IM_q z@s7LT)7YA2;10V!V+0L3jQRQI7V(ON-=i{k^$y&4zEet(b-BB<2>fy*dJ64VURxcYE{=(tvqMYO@yZ1kYJ z**|KS%x&Bxx#olGU_Nh%uc5a|Z5?~Q$=6#WcCo3mdSly;Yn?&l1|Z$o26Wc8Yy(bD z{rckg?N%X~yf)N2mNt5I+V>kN{*=O}Dd4AOb{UmeXiC20#Xi{`-B6k~!^P(&Vm%`t z4-+qNHSHsI9wiXt^5buYN?Y020#(UGmuCd$^*-joALy7W=`Eq9V{Arw+$Wb~a95`D zyw|DD3A|qFY)T<*J1>LO0v1|-UF5UxcUw&m9i>ZxFeTo(al}vR4;H8Lyynaeo9~F0 z5~boL2@CI$^7De!w2+LB^_o2<14t)Yg!J49e&LX}s-u5#+K}~9PT?!O&=6DVykP)z zFWnM<|0(W^9cZl&R^mDZK@TlHcUZ)u_WzocwSPE7I$M8ZF=l^beg}jo( zd?Dy4I?pXK8j;9(v7+^`Cu}4Ju&w1W4Zd$E@}%K z(h5H@s&2gfmJEjZiNiK0#Ck}gSw_TYUATbvv5Mh5zgVOAxRZ1fidFkpD~aI@XG)@| 
z!H|&Gn~&bmGchaH9~^X`26FL|!R%XvFP3_K0{Q8i@i`?1x@TThRC#$NR%S9@6=L}O zv2-6Vl=wY=TdSF;0p4sni(NS&7MH!$bXY2Vqm4$^syCqrx;={(&|H(zf8nM@kj=CV zyid3dU&ZaV>Ld8apA{0n@nIHf!`^x3RG&BmFS&eW71tuTt8vBo?XOEOgV6Z-E65)M z2^47j`%aJvXcU7oyC^UO?cL9={*<7v82v6@G4wu>^Wj%8)%ck6$C4d&&G{j5;uRsk zsB5D!)>t-P@WD^`P$Vo9nC21A{hZ1wU$PTfQG1YzEn5%l<5XuyFeUad7XaMb&kzKq_j^&1 zm7BD5sKQ0tvu+x-jispk2d>S;l&tUfPZqdxYx*0Vc#qmFo3eC%VVH8#335taF0d~D zJzM#^O+1A@bW>}}s(HVGXdUi!Q*`OAs0O>8QFg<~?KUoP>HJOc+L;pxdsl$$Ol3zo$kxqTB}LVO7dX z`ciPn5FX;|#a^-!hQzD-(k*vMCoG~$!SP1s} zY(mg_4N6W&N93XlAdxX*kZlyhabyMzKSkGjY={Pw4i6EXC(g2?G1vEDajV4ELGrh> z3M}BQYPWFN@w=_g_)G1gAn>_E@XZD6$Y2S*AmyV981SgCfW=yioH5}U@3t&!N5hEL z`Cy0jzbf+aSTxPcg)0g^IGtSpqB3iN=U{Gv(36OWf0c!R6fN2fyYAD3{R=!6Yn12= z!MbtzZk*kv{L-K2O$Vo%#bB)QAn<+Gu?xEkn1SQoMw!q9L#+-$b({mnS){-j`OO&| zU(S1vq1L=p+M#*Ws^*P4((R)ARisp*dKzDf+4{*$kZ_v3nv3q|lAmHdqa<4q$lf zuS4lN0KvO@Zms+PVZRT@dp5q-L~yp{KCXl`;Y`}RGU%VYRbTvqyUaw~I^s%|R_szK zGox5HQnvy0P^?yTXK2mF`KZ5GNA6})xKwUUk!8>`?)$Rmvsolx%tlnNP&k9$hP{$n zGX&2y$w80)yxsLK2@60=dTHj+_t0f`I3%C-XT9(2)S2E@eD|Vs`>Bsf7u8U9J8CBl z>an}-KF)Xix?Xv|lGLUT61ifyXteo3_28g1vlHQXDSFCyOU4S*+tKhy(Rg|SV@qz& zLE21O%D~(gED_8c5r$Zkc zUGlVDNq4&T_48yyz-z`FZ709!x2T}A-gYOg!;}4;*I{7jkV={3rq98L@8^A+{V3t* z^iaLMX3IP^aXdUn6r1!-4Yz0f$x}40+6+#HXvrJb{X)yXEC@D%>x$p%x}R7=>Z(h3 zl%=j3fLucL=7MaCPdO*z6S%L6?yGvs4N2(I3|ySJqnNWvnS-wimbMb+eZSDIbAR-X zLO;Mz@kTbe>QU66(wKiE>~sGh?7Tk}Vs1;A0~#H5VvQ61 z$i!P`p+K#JRXn!USr{IFH5pO_XSAoz?IMKPuQuFCKEtxpf;-(>LHvZN2C*hAkQ-jy zJ>_Q{&!s6&$2Tmme~Xl?*5dk{td=)>q(f5Zy`uyaF?xfXT_O3-hJ9$@SIW^AqPFmy zQNY4Qprs_(?2sX^F41`943|tZ?e<4;`r%mW;BiJB=$+ByK;WE8yawV!vvNjuH0Kj8>K?ee z0QT|cPCg|$Kota+5WfMs$Rr@_KGQ51BT+2D-nTfGR)bynBtik;>mZ|&vj=RU1Z~me z)iWkaAD0wGQ1Dlr*AIXwFN@0Ctq}Ez#$4R2us=PuvbA?0ghg`frA#m5F%^s6ASa1Nxz;0Y zn_@THJIt#@GZPY>*=iX~?Xgj*t8O@B`{W`zqqvHc5Al1_;RjUq& zt_7xz@FycHrYKah-(4Zx^V^t}ErvLzEeu(}-`WMW;W5%uW`-lyw=FmF1M7op`j z&^mq)D#l6Vy0<^n_f=}{x!1gqv(BfX-N8?$c^T`w+2%_x3qb4dJ|Hmw+AR<3(gWws z?q`qeBF)#jbZ^wHB@h$zOSk9%QQW7Ohnk+4+*|L}qb(5F@{_o4?+T=-lAm-yiuYaa 
ztkONGhdt>0^2Q^uB+bH3#T7rDOor{&PTdXOsQKZ4DDgX(fYZ$}Nw2RwId`A# zoFKDK_O1A}$9D?(G`Rm<#= zxC^|KF&g<9+ISZ`JD{?a#wV#!m{;A*J(SHU#hJpesr3_ z@f9@}OFz&n_{t~W`}xGssia#_#tj+K{Wf2`?2rUc4N49kNT0OgOpc^md}buM|>SJXc7M z)z;p~`ap#-FCkVrSl$=_1E$_2413m^X$<7)CV8QMGIeg;OkRaWA*{(Ky@|$IU>_Wx zXH@eVm{$70r+JVljN@0w=`2b7W_u&r(Q8;A+;5NC!Nsnm{|KL77XBZ@=W95A7|v|x zt=8f7T_(}#XpT-)4xD=)?uoa4^4qX>!lsb{I;7hpd~_dlHU`KR zCGIqOj%pk8n%gH9eL=UcSYP+*dfafOo@}~$#bhMC5AY=QDw)sy%i9Y>G7=LOC}MCx=c~N=@eDmVEd*8vSdpY zmF+rMm3otYTxN7ix`U{xuKyLvT_0u6MS7y~c8o7Z& zzyJq{6TqF9x~1VcExmx!f&xp8XLAo}~NrBgp^ zE_|yG9ZpbJ@zu2IXS&nx+KD((|q&vmvo1MaxlP^xrZ(Y{aOJ)eEM z(q;})I#3xPN{Y0Udt`syjiTjc!^)vaK&!I3mt2lTz@RNDdq$}R_v436^*c`tb zbfUV8hJz{0ZgKiugr}V2O~#~i#otk_?64#!=TS zy~^(UR4+^pU)sY(hx-yWBUfUcA6^kAof(B|5jrBk>F^-e#p&)A5v4nwsOzu~hp4{$ zT)C&%T0P6!A8U{w_$~z7clVIw^nz0vn_MT?a|?(jbNccGMsi66T%q@!XluLx^sNT$ zMP$b>haQb7sCidS|C1~faMdj80q#ga0DEar;LAEOYBA-aY_wbP3oySxe^&Ot1Lnm4 z3Ye4r2F$0ZIjpF7yA<@zN6z09hMcwvqOI7;-MAIQydKkdc)2R1?Y^QG&BKYfJfrl; zXO!5p{Bp4Qw%ivbbGDaY1d%|a?_-j2V8P*0cpkLqo{V1vAT_k^-8Us7)(Y{sO9p)J z8T$|6U$)sDB-&m$JGNbsd()fRv-_;FEh3u%d3iES)*ri{g`F|0bM?P0 zg}5tlfyZeL{s$;;SYy@^HO;6BeH0LAG`o=U4RN~x0Lt6`0m||Bs^1T%4sZ-^Vymj_ z=MN*ejvMu%=7daNETD^P+&th3htAePGGp6Sbf1ZATpr&|T|_XaPSHDhGhG2X0IJxB zfjUqr2kcvy{q2#%08f2g19@`=yXUaMy{p%A&{zG8ck=-JIoPFu#eJ7ur|49avl~~3H9W&#mSYDPQF=+p1uaZ zy_EQ5MG(C=%RTu3>b?7Naw{g9Op)u2&l(P*=8P{1qeEI~C^@IKB9PVO;G(@ua^a_| ziBS8pXsIz9d-yX;R#^C4@TN+_T;BE^L9C0E5T3izR!;x>~hTD4=!Z7rX=M{NUJNX+oroyg$-XvFeMQIz8=>~p3w!de$ zI-lCO{Z7%{!r%(n1gCH6FcRW{ zViLi~r0~(O+8+S{qZhyR;foQ`H4ZThG=3)U;PtP1mJe=?U@B(gpm0o=K62jq!wBr+ z3VVMOUc5PQR|tl?G=ggvfk+{y9p@L);B_q^4^=N65APM27vi-z{c-F2o+@F^_>apS zz@~0A=3h$EBH3vwN$+C_DDy){Bf#<ZXRXS*j z-!`!2JbF?)^^PBTcSL`i>ieH80Z(kXH%@2>uAKk@6D{Q(s zhh+6JEM?K-&ySIrq(#%L+`B(d77LOLPTmun0%p$JsROO|G%InXKXona{|y$7Sh+LN z|7WL2k<~9nO%FYp2x)b9v#AqQq$7%M)LJUZrPE8^nnZnNL9C>JL!#uNf3`~$Z8_f5 zQS$;sAriiMrPp`bJsoK{@rmwVrH|c=^`&k9p~7|*)en1lVwi?#b{fOi{qH`R4E>@S zxD9`p(a8V3TeTp+<9ff_*GL^&A=8xZ$I48#(u|^~ySg``pl?5>MQGaXM5#GPTZE*5 
z!MXbeKZc@`!Zezi4MXM1{{m|!@F)V9bcwUc^l%d{8+D2Bl!=-Ll$e{!XzQF5?`aRd zff)7__M=l>IH)<0epPd;>ULscd4KQu0L$D&b8;0cl49RxEsOa3BNmYdlGk{SXa2GW z-oWnO$xqXtOXrWe^eh7bU^7@Vrrz>~m1$?I4Y7rm$faAd)mjtQhJ8)eR*w-Yspt<} z7iGUOa~=zn`DIDvdjFqs;>#~uflgWnNzBNE}Nwl9|;@EyhrAI+>tpS5Oaj< z*tD(mIjrCA*Iy~}ZesqhIQgs%9$q}X-?Y%$giap=goHtsEadMpChSi4iiOQ}9XDO+ zZkaQkV3H-5uS;xB*(u1LpE}kVfP5SF;=hVCUz~as*Gl+&&s=}s*3KcVxj-E7ujzTY z7gf8jU5H+fQv>2IejmF^oZ@>un%`dV)HIf!foupaa-})0qMC1K1~h5R&`0$&S2Pz8EQ+Z@jTH2}Pb^lmo%_FgB1+)f!Omh}`lbR=8nW*}5h3C8E7$0;!`p@qf8giMP1mDBx)CT03&IkN-~`?N<{f2QmS7IRyppXp?^fSb~d# zDcN4NXLy{XDDrv|c<8Jxa|2!n`?vIilPJ-(Toppya^%mck3_cYLi;jHqSwe0A*lGA z$=~~T<8PSz)Jg3EZjb!lh8b~-K55>HL8e4Kp`a*Awoj#O8WXSprEu2?MxBhz1n9sH z%KV4^|MzOaRZ+Oq;06RD{oAR2NAXDKrfu)c@PhK%kQ!2Eq?-F;-|$Mp3IPccCtTOvw$J%Db5RhBK3BwNR>I2N*cKsA#WoNx%B ze`BwP31?@cfrZ`)TagWBypVx@Quy?ENkpq96#4*x=@Yyvh4r#-ha(|7@+~&jlZ|r^ zpdQ$fwhr){@*pkMQimB1V$PeyAOC*|1`;NUKEqGE>dY60Hp&mr=o2yD3NH#1Ph`Bz z+~|MK{ll=T^3P&4troqJ(4F-IDT$w9!Z`Gka&Ys4WL3@EewpnI;;z(98Fd0510X>oo3`SE=lm?``(%wwyx7S$6^72u3nYhoE6LF7 z2Oj|w&0Q$I`pxofAshEEk-6~6}205dbXH;Y%rQ zFO?>>20@7;+W+EKUx`K0LrW{IW9UBy)kNCbWh}TDiMbt2y#Qd-7x7L*t{GUByD`A7 z1dv!XkXQZG9-G2U$JDQ5YZSnG=o#`_#inyCA8VU56JzNzEj?dYgs%qhSRWr%yu0U zR5Z{VNbp^wWi8tvoa{Az)$n|fR*mwvlcc`-zj)MTFW7S;RVr9gI)_~(`EGD3a20(Q zQ$+G|7p<=MGSELeVa4~namWj{PXSJ^{97;3`>hvb0eS%zu5*4^%`OPlJlBJV7#mnC zC@1~S!D(VO_yEBJLptfP&2#Wv;?}klTn{-+P7Z`0Pm2qKmx%1eIZG;m+6ciU%>CTv<@!n?8rG_-mW(~cf%9O*LLHT z=Im)%kDyk#fk8nuXb=B{U|ZrBG_f*Pf4OJ$xcvI}H~~V+Fif&6@uDa$AUZxLyqi^g zovSk@)(?FhmazBeDtTG=KC=t10c`31IqL)dH+BHd*B^7F6v(a^rbX3l2;QR(2@g@D zQs*Z(7CilzGqV!Qsf9FZCA^D(egehpG&6(3#|K+ZxdknhZw1N^ZYeLEGsUqnN!?4a zo-nIC7OOfAKdlBAuQf`)$3kIpa6>#B%)spKoFMtTf3X$t@YNQ!?t(LmRzpPWxhqa| zGo|gV++p~5KKOD`8|s8L@vas2yzHG97vgE3x5g??%c^raIIN+v{i-drz?Vh7g1)Y0 z7U8w*V4}N1LeLN>V@rCN9Jkq4WCGlp2t>(bXG-d_2*2LQ@@6WNg@S2^U}~p!YvKts z@cO$O_uWCp6*7#NDxV-V}K7GIAAat@fR8?I|AmIybL;$j-ncLkZ+C6OMGmm?NmH}$-W;=T z26B?&g>KpoKr>HO9`eoowWyfs#nGNl>IxOfDwpt|Tn09DR?%?1gdNt)>uO(&H{VW= 
zDA_)Nsu9Je!uqBevfhv%f{i{+`*%%C^L)Q zik>2&pMlR~^)xK(I%|BMbb`Y%pMS*}Vg#qx7fSc3HgZ1Zd)4~%3=;F#^sTg!`nZy1 z!@QLkmBURS&hRjj8N*a@t#Uxjb2!*KAjFoXqKNca7Iu4~1QGo$Z$hrSpW}mYYq>_b zEb+K2VO^6!!G=$#gAKnF=^4zlZ|H$RwVhVK-%m#Mp?` zAKtXKh_BxDMrb0(I#6!tY(E@xDoEh+p0eTu1H%Qs zibCvJImKjb+6o`+{lM3kR_CJF=L#Q>njsuhi5{O>V1N2sZWlWI-)$2% zD5q=Yu#f6b&U5n|{i>1_KsN~UZoz--eq?{UIaS?*&U5+^V(zncDWN>kJhy9^yXT@8 zA^A_cY5c&&WnM-nyxKkGb>Atj_UX?vf4`Ui`=6ZR)q4F^qEerQDrMJi57K%~)w#Na z+j&53cRtJ=*Vhirw3YOEeq0ET$H~@JWFVdj9jmF7xy^kyWqf%Z9_^z4P3Nr|QZn2z&#YIu~7&A?#n4=fg#|KuP~+N1CDS3Uwc3F!Uc+lgfwwo1zR7kcmZfU3<+JghZR6H3X_PxC7D z+Yb5_2z00@ZN3WR6~(UJQE<@#|C8vq^e55p^v2dgsu&Q_SE=tmuO+Fh@U`IF-J>bm*G#Zxjy0-_%O<fw!FGMx zFNNUKQAyfZwP^4tGPLIS!Avx=Z z;HDvQGCKsN);>#oc%|_3p@gK=y%~d)XsREAwX#MN)gG~Go|*!25`RbEQE>c@<|~M5 z{kkbkgFZY3IQ6*?C5opikMX`CXY&;mlAS}R)na?<`3>q9f-UP`OY+elh4bw~t*&l* zy;ndZvtie#W`&L<{TE+)A<;9=-fWQ?or3qB{#Hupb@lF<>2v} zh@jW$KKPS`M`4i##PH4Jzp}_M9JEX{GqHPB-dYZ86GE%L5DjZpxfINE`Gs8gA%C{# zVjZ{OtDUv$O}FI5Bii&h6v`Y~Ge5X6vpV%z0e)$Wi6;yZ+|Elc?xSC+{E3>V~^ z8W|8Gp}wC@&5|P>JzWKXJQhagHJ=p5-DZWrCpad-<FxO1%LrP4g=v`s>>g7w0H5?;o^Oym@lE&eHk`^fD&P&Wl@<&37?c zb=sNjFj>*>Ed*p;=IoeO}uH)V=U%yoivfB7xLiH&8GbB+|R8N{vZufwvwZui2(7z|;)CW*zGLkKT%6Dr| zTwIF%BTG=^oByrHGE7WIyLN$=pVUz{<_{mLhz z9@+#uXP*Y>hh)uv((?Gc1*?8r>x@{1dMQ+Q`kspw<4AogWyXH};p26%Si4P~(E`Px zXkeL3^!N+CeqI?}U$`wPiInKiOdr5L-^cYY?V=MMu=$LHiG{LrokPxu9 z;eeADhvK%MaJd$VJ-hPXT zPgK+dpigcsB7Qz|;kqbyY2;Te?8v;WfDlZ1lq^@5gj*kcxJ^|gv{UDu(W|pHpv6Oz zm?#=niI2mU4!|>(^z`5?BKe?m6?^v|5=Bv{ScEF^pCHK=;?lUTaUw2 zO>BK=MhxB63%qjKp)u`42{3l-#ZEyzoYC-q5*;k>M(ZVZOTM#|g-!=m+pHk9=M(ji zKwEs}UquJmg>kp&9)MU&u0_QQZ`6A9&E>32U;Fq<-97GuG<~u1WI?mP>WwmfE<0^) zczQui;HGLW8!%kq=fSO7>&kx;@-|Ev`>>nD85hVUK5sBbP#I$f+hj=i5|wJxjoLO4 z_m65|#DZe(!s4emUZiMNb&!IY=N1gG6$3-dwxO#SaoDmRvFhn%+E$& zg4qCmaUa2nb8q(Jc2VE&3_XMa6BaDV3eZah%=-psHqX`AfR`?wTl6|R=NMN)7X?&;%tMzOwiR5R2{En&`@Y}Z>FEnZX5bY>|O%_cH6$jP9Yu7 zr|qqVu$nR^&Fk3VO@@seUi#g)z!o`&nI$r<0JRk!jv@QXAYpxUZI{# zNYf6KG5OnIxJOMtQBq)*9=HvJ>b@)BsC`;san`~n;xcbDO+Ju1(Qh=B-g{GNY;Ndw 
ztX3!He09pbZtJG|Va_*vBrPJr5GY7$YGfjlo8#RH=pWIcq6ho{*erWVw+v75X;>q6 zN+~<~WLD%+IgAbhWetz4Uf4kq!gddLFtpuj_m## zw1AIqI-gt(%FLE%_?68I2C{io6#s{i!bJd5IMDkqkiznRh7>GV92}4F>|3?R5_MNW zT%%fXGq?b2E?DIfIs;QY%dUuoYV@Pt?`!pM@HhZ)q>~pj4j5cSOO;l=5a{ehr#o-k zTL`eig1_O&{Bj)Y`oK;dT#57SWj0fhxnHZjC&eG}!ITpO#P3406Ig8!KIl2+6&l7{=!UrTHU7iWl3;<(`HkPven3Ww>=`ghQqNP1?m$Sc%kUgvWd!$8QFrVAkUD6CNUyyxQC_V9<-^3SW|f>+Mn zoT~T+0wk$G5x@6kmE=&tMvPcGq3TEB-xw=BP#3cFpol4+?oo{BY^8*6+APwr+-U)yD6i-6{v|yGtiUVH zUo=YBGLR_+V6xgL7M=b2DIVs&Mh9`1O1}K{v3YZSC+5@Cv?+3KZzp0nuNWnL;GWE? zYZlqYLp9$*pW{6j6^gIrRP7s=dyV_aN(xkDneND z?G_03&D=z2jv$vWz_Y2oIJG~@>G@vi4~ck_f_yQjf6EwC8iypZDd#sjakUq+03ysI zkkOToig!AfW2_Iah{Di&hET#8p(yRJC4$}nmad#LtE%}K8fSPwhnc=T$7l6(iQlDb zS<&RqJFXXK#jMZBk%3Sx%!9uG4WeR-=IhrP`6yReZn@@F3Z35)#5fm7J&|4XjN_tB z&=`NRX{G8*27|+2sKRGNV>5s%)cVfY_ZVR=qu$ri&ZTD83~Y&ViQN>Fw zJ961#dC+gwL|}|JFIKJsd6J*xk{4(~coD`h%x>f*L zI7B|lo7H0;+F+d1hS~1%Qr@V$UfVIG_ImHWhN2XtiM@v@0moUh@d-_^#On5M{Ki$) zi?zWR;dc>@LM4Y{Xbctkf{{Z%D6|llzkHham4AW$_%Q5NNDwphXzathE8%b=dpYZvE(yqKV@j)u(gr?w&~E`>fa*oG>;QL-(Q3x>FS`VaAvQe|-$uf<-Wo5-1b#sDUR+U(W5O6-k zFreDkk&sSckH6fS+`z7uia$7HSr#x#*`$0WPYDJsewNCw)jDi07npb%R;Z?-Vvs7i zEu!5=_eqt#uUbC?|0N(dgyDAVpdKW;srO8#geW z58e;J0b9Q1KhQ#!78zQo|2Zr`h87?pcJcqBLlpg=b%;V~mw-PyMB#tYA?RH;PV0!a zcY1#SYD0=1d7vcrm!c6({apq74^aG{vIJBb(7N{t2t8=BlrNDfbKlRC)(P|6ohWBW zJgTC5mdX+-Rq8Y|B2Ha_dE#ak6_UBezal8TKLz0rYR30PChxx+dC{u|g1Nrt}>HT#zj^w(`IpAb_bj_QG&^nIB>`k2 z&o%)F>)8<2-7;mx9Gts`5U+Pt!cn9I1%(`WK7jCZLzw__DRVilNyTmXF@p7(y|`D1 z$2Jq7PYwqG9%SpL4WXc8k+x0Dy&VPOP@Y1?^zn&OFku0hEFo0{mPpocep8%k7x=%* z`_8DQ*0oy{1qBfmDN2bI1f)t)N}{5o(o~cpB_JRmC7_^`6cG^=rFWD_SLwY;BGLm$ z?G5*2`_>LCK!{-#%HZehF!2$7kgk-6f5_ z+|r}hd#ZF+$IoBNfGPYO)p~sJh(UvGTJp7{y%_k*{deGt*FOVBv()&}8)P}5hdeW= zZKXtn;`@vTHPGi(`r%Q^xK0lBPuc*?SxBD&Oic