Merge branch 'dev' into gmail-fixes
vtalas authored Dec 20, 2024
2 parents acfcb00 + 83caa77 commit df58721
Showing 32 changed files with 600 additions and 437 deletions.
12 changes: 12 additions & 0 deletions src/appmixer/activecampaign/tasks/CreateTask/component.json
@@ -40,6 +40,18 @@
"due",
"duration",
"durationUnits"
],
"oneOf": [
{
"required": [
"contactId"
]
},
{
"required": [
"dealId"
]
}
]
},
"inspector": {
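The new oneOf constraint above means a CreateTask input must carry either contactId or dealId on top of the existing required fields. A minimal sketch of that rule, assuming a generic JSON Schema validator such as Ajv rather than Appmixer's own validation machinery:

    // Sketch only, assuming Ajv as a stand-in validator; Appmixer's internal
    // validation is not shown in this commit.
    const Ajv = require('ajv');
    const ajv = new Ajv();

    const schema = {
        type: 'object',
        oneOf: [
            { required: ['contactId'] },
            { required: ['dealId'] }
        ]
    };

    const validate = ajv.compile(schema);
    console.log(validate({ title: 'Follow up', contactId: 42 })); // true
    console.log(validate({ title: 'Follow up' }));                // false: neither field present
    console.log(validate({ contactId: 42, dealId: 7 }));          // false: oneOf allows exactly one branch to match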
6 changes: 4 additions & 2 deletions src/appmixer/aws/redshift/NewRow/NewRow.js
@@ -14,15 +14,17 @@ module.exports = {
const { query, compareField: field } = context.properties;
let lastSeenValue = await context.stateGet('lastSeenValue');

const sanitizedQuery = query.replace(/;$/, '');

try {
if (lastSeenValue === null) {
// On the first run, query only the latest row to set the lastSeenValue
const latestRowQuery = `${query} ORDER BY ${field} DESC LIMIT 1`;
const latestRowQuery = `${sanitizedQuery} ORDER BY ${field} DESC LIMIT 1`;
const latestRowResult = await runQuery({ context: context.auth, query: latestRowQuery });
lastSeenValue = latestRowResult.rows.length ? latestRowResult.rows[0][field] : 0;
await context.stateSet('lastSeenValue', lastSeenValue);
} else {
const newRows = await this.checkForNewRows(context, query, field, lastSeenValue);
const newRows = await this.checkForNewRows(context, sanitizedQuery, field, lastSeenValue);
if (newRows.length) {
const lastValue = newRows[newRows.length - 1][field];
for (const row of newRows) {
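The sanitization added above exists because the trigger appends ORDER BY ... LIMIT 1 (and further conditions) to the user-supplied query, so a trailing semicolon would otherwise land in the middle of the composed statement. A small illustration of the replace(/;$/, '') behaviour, with 'orders' and 'id' as made-up example values:

    // Illustration of the trailing-semicolon strip used above.
    const query = 'SELECT * FROM orders;';
    const field = 'id';

    const sanitizedQuery = query.replace(/;$/, '');   // 'SELECT * FROM orders'
    const latestRowQuery = `${sanitizedQuery} ORDER BY ${field} DESC LIMIT 1`;

    console.log(latestRowQuery);
    // SELECT * FROM orders ORDER BY id DESC LIMIT 1            (valid)
    // without the strip: SELECT * FROM orders; ORDER BY ...    (syntax error)
    // Note the regex only removes a semicolon at the very end of the string;
    // a semicolon followed by trailing whitespace would be kept.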
4 changes: 2 additions & 2 deletions src/appmixer/aws/redshift/NewRow/component.json
@@ -29,7 +29,7 @@
"type": "textarea",
"index": 1,
"label": "SQL Query",
"tooltip": "Enter the SQL query to fetch data from your database. This query should select from the table you want to monitor for new rows. Avoid including a WHERE clause for filtering by the compare field, as this will be handled automatically to detect new entries For example, if you want to monitor a table named 'orders', your query might be SELECT * FROM orders. The component will add necessary conditions to check for new rows."
"tooltip": "Enter the SQL query to fetch data from your database. This query should select from the table you want to monitor for new rows. Avoid including a WHERE clause for filtering by the compare field, as this will be handled automatically to detect new entries For example, if you want to monitor a table named 'orders', your query might be SELECT * FROM orders. The component will add necessary conditions to check for new rows. Semicolon will be ignored."
},
"compareField": {
"type": "text",
@@ -52,4 +52,4 @@
}
],
"icon": "data:image/svg+xml;base64,PD94bWwgdmVyc2lvbj0iMS4wIiBlbmNvZGluZz0iVVRGLTgiPz4KPHN2ZyB3aWR0aD0iMjY4cHgiIGhlaWdodD0iMzE0cHgiIHZpZXdCb3g9IjAgMCAyNjggMzE0IiB2ZXJzaW9uPSIxLjEiIHhtbG5zPSJodHRwOi8vd3d3LnczLm9yZy8yMDAwL3N2ZyIgeG1sbnM6eGxpbms9Imh0dHA6Ly93d3cudzMub3JnLzE5OTkveGxpbmsiPgogICAgPHRpdGxlPmFtYXpvbi1yZWRzaGlmdDwvdGl0bGU+CiAgICA8ZyBpZD0iUGFnZS0xIiBzdHJva2U9Im5vbmUiIHN0cm9rZS13aWR0aD0iMSIgZmlsbD0ibm9uZSIgZmlsbC1ydWxlPSJldmVub2RkIj4KICAgICAgICA8ZyBpZD0iYW1hem9uLXJlZHNoaWZ0IiB0cmFuc2Zvcm09InRyYW5zbGF0ZSgwLjg0MDAwMCwgMC43MDIxMjgpIiBmaWxsLXJ1bGU9Im5vbnplcm8iPgogICAgICAgICAgICA8cG9seWdvbiBpZD0iU2hhcGUiIGZpbGw9IiM1Mjk0Q0YiIHBvaW50cz0iMjQ0LjkyNzk4OSA0My42MTcwMjEzIDI2NyA1NS4wODY0OTgzIDI2NyAyNTYuNTczOTIyIDI0NC45Mjc5ODkgMjY4LjA4NTEwNiAyMTAgMTU2LjE0MzAxNCI+PC9wb2x5Z29uPgogICAgICAgICAgICA8cG9seWdvbiBpZD0iU2hhcGUiIGZpbGw9IiMyMDVCOTkiIHBvaW50cz0iMjIuMjY4OTQxIDQzLjYxNzAyMTMgMCA1NS4wODY0OTgzIDAgMjU2LjU3MzkyMiAyMi4yNjg5NDEgMjY4LjA4NTEwNiA3OCAxNTYuMTQzMDE0Ij48L3BvbHlnb24+CiAgICAgICAgICAgIDxwb2x5Z29uIGlkPSJTaGFwZSIgZmlsbD0iIzIwNUI5OSIgcG9pbnRzPSIxMzQgMjM5LjE3MTI4IDI0NSAyNjguMDg1MTA2IDI0NSA0My42MTcwMjEzIDEzNCA3Mi41MzA4NDczIj48L3BvbHlnb24+CiAgICAgICAgICAgIDxwb2x5Z29uIGlkPSJTaGFwZSIgZmlsbD0iIzUyOTRDRiIgcG9pbnRzPSIxMzQgMjM5LjE3MTI4IDIyIDI2OC4wODUxMDYgMjIgNDMuNjE3MDIxMyAxMzQgNzIuNTMwODQ3MyI+PC9wb2x5Z29uPgogICAgICAgICAgICA8cG9seWdvbiBpZD0iU2hhcGUiIGZpbGw9IiM1Mjk0Q0YiIHBvaW50cz0iMTYwLjM0MzMxOCAzMTIuNzY1OTU3IDIwMyAyOTAuMTY5OTA3IDIwMyAyMi41OTYwNTA0IDE2MC4zNDMzMTggMCAxMTggMTQ4LjcyNjEyMiI+PC9wb2x5Z29uPgogICAgICAgICAgICA8cG9seWdvbiBpZD0iU2hhcGUiIGZpbGw9IiMyMDVCOTkiIHBvaW50cz0iMTA2Ljg2MzgxMyAzMTIuNzY1OTU3IDY0IDI5MC4xNjk5MDcgNjQgMjIuNTk2MDUwNCAxMDYuODYzODEzIDAgMTQ1IDE1Ni4zODI5NzgiPjwvcG9seWdvbj4KICAgICAgICAgICAgPHJlY3QgaWQ9IlJlY3RhbmdsZS1wYXRoIiBmaWxsPSIjMkU3M0I4IiB4PSIxMDciIHk9IjAiIHdpZHRoPSI1MyIgaGVpZ2h0PSIzMTIuNzY1OTU3Ij48L3JlY3Q+CiAgICAgICAgPC9nPgogICAgPC9nPgo8L3N2Zz4="
}
}
11 changes: 9 additions & 2 deletions src/appmixer/aws/redshift/UpdatedRow/UpdatedRow.js
@@ -9,18 +9,25 @@ module.exports = {
let { lastUpdated } = await context.stateGet('lastUpdated') || {};
let processedKeys = await context.stateGet('processedKeys') || {};

const sanitizedQuery = query.replace(/;$/, '');

try {
if (!lastUpdated) {
// On first tick, fetch only the most recent update to set lastUpdated
const latestRowQuery = `${query} ORDER BY ${updatedDateColumn} DESC LIMIT 1`;
const latestRowQuery = `${sanitizedQuery} ORDER BY ${updatedDateColumn} DESC LIMIT 1`;
const latestRowResult = await runQuery({ context: context.auth, query: latestRowQuery });
lastUpdated = latestRowResult.rows.length
? latestRowResult.rows[0][updatedDateColumn]
: 0;

await context.stateSet('lastUpdated', { lastUpdated });
} else {
const updatedRows = await this.checkForUpdatedRows(context, query, updatedDateColumn, lastUpdated);
const updatedRows = await this.checkForUpdatedRows(
context,
sanitizedQuery,
updatedDateColumn,
lastUpdated
);
for (const row of updatedRows) {

if (!primaryKey || !processedKeys[row[primaryKey]]) {
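UpdatedRow applies the same trailing-semicolon sanitization before handing the query to checkForUpdatedRows, which is outside this hunk. A hedged sketch of how that helper presumably composes the follow-up query; the exact WHERE/ORDER BY shape is an assumption, not code from this commit:

    // Hypothetical composition only; the real checkForUpdatedRows lives further
    // down in UpdatedRow.js and may build the statement differently.
    const buildUpdatedRowsQuery = (sanitizedQuery, updatedDateColumn, lastUpdated) =>
        `${sanitizedQuery} WHERE ${updatedDateColumn} > '${lastUpdated}' ORDER BY ${updatedDateColumn} ASC`;

    console.log(buildUpdatedRowsQuery('SELECT * FROM orders', 'updated_at', '2024-12-01 00:00:00'));
    // SELECT * FROM orders WHERE updated_at > '2024-12-01 00:00:00' ORDER BY updated_at ASC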
4 changes: 2 additions & 2 deletions src/appmixer/aws/redshift/UpdatedRow/component.json
@@ -32,7 +32,7 @@
"type": "textarea",
"index": 1,
"label": "SQL Query",
"tooltip": "Enter the SQL query to fetch data from your database. This query should select from the table you want to monitor for new rows. Avoid including a WHERE clause for filtering by the compare field, as this will be handled automatically to detect new entries For example, if you want to monitor a table named 'orders', your query might be SELECT * FROM orders. The component will add necessary conditions to check for new rows."
"tooltip": "Enter the SQL query to fetch data from your database. This query should select from the table you want to monitor for new rows. Avoid including a WHERE clause for filtering by the compare field, as this will be handled automatically to detect new entries For example, if you want to monitor a table named 'orders', your query might be SELECT * FROM orders. The component will add necessary conditions to check for new rows. Semicolon will be ignored."
},
"updatedDateColumn": {
"type": "text",
@@ -61,4 +61,4 @@
}
],
"icon": "data:image/svg+xml;base64,PD94bWwgdmVyc2lvbj0iMS4wIiBlbmNvZGluZz0iVVRGLTgiPz4KPHN2ZyB3aWR0aD0iMjY4cHgiIGhlaWdodD0iMzE0cHgiIHZpZXdCb3g9IjAgMCAyNjggMzE0IiB2ZXJzaW9uPSIxLjEiIHhtbG5zPSJodHRwOi8vd3d3LnczLm9yZy8yMDAwL3N2ZyIgeG1sbnM6eGxpbms9Imh0dHA6Ly93d3cudzMub3JnLzE5OTkveGxpbmsiPgogICAgPHRpdGxlPmFtYXpvbi1yZWRzaGlmdDwvdGl0bGU+CiAgICA8ZyBpZD0iUGFnZS0xIiBzdHJva2U9Im5vbmUiIHN0cm9rZS13aWR0aD0iMSIgZmlsbD0ibm9uZSIgZmlsbC1ydWxlPSJldmVub2RkIj4KICAgICAgICA8ZyBpZD0iYW1hem9uLXJlZHNoaWZ0IiB0cmFuc2Zvcm09InRyYW5zbGF0ZSgwLjg0MDAwMCwgMC43MDIxMjgpIiBmaWxsLXJ1bGU9Im5vbnplcm8iPgogICAgICAgICAgICA8cG9seWdvbiBpZD0iU2hhcGUiIGZpbGw9IiM1Mjk0Q0YiIHBvaW50cz0iMjQ0LjkyNzk4OSA0My42MTcwMjEzIDI2NyA1NS4wODY0OTgzIDI2NyAyNTYuNTczOTIyIDI0NC45Mjc5ODkgMjY4LjA4NTEwNiAyMTAgMTU2LjE0MzAxNCI+PC9wb2x5Z29uPgogICAgICAgICAgICA8cG9seWdvbiBpZD0iU2hhcGUiIGZpbGw9IiMyMDVCOTkiIHBvaW50cz0iMjIuMjY4OTQxIDQzLjYxNzAyMTMgMCA1NS4wODY0OTgzIDAgMjU2LjU3MzkyMiAyMi4yNjg5NDEgMjY4LjA4NTEwNiA3OCAxNTYuMTQzMDE0Ij48L3BvbHlnb24+CiAgICAgICAgICAgIDxwb2x5Z29uIGlkPSJTaGFwZSIgZmlsbD0iIzIwNUI5OSIgcG9pbnRzPSIxMzQgMjM5LjE3MTI4IDI0NSAyNjguMDg1MTA2IDI0NSA0My42MTcwMjEzIDEzNCA3Mi41MzA4NDczIj48L3BvbHlnb24+CiAgICAgICAgICAgIDxwb2x5Z29uIGlkPSJTaGFwZSIgZmlsbD0iIzUyOTRDRiIgcG9pbnRzPSIxMzQgMjM5LjE3MTI4IDIyIDI2OC4wODUxMDYgMjIgNDMuNjE3MDIxMyAxMzQgNzIuNTMwODQ3MyI+PC9wb2x5Z29uPgogICAgICAgICAgICA8cG9seWdvbiBpZD0iU2hhcGUiIGZpbGw9IiM1Mjk0Q0YiIHBvaW50cz0iMTYwLjM0MzMxOCAzMTIuNzY1OTU3IDIwMyAyOTAuMTY5OTA3IDIwMyAyMi41OTYwNTA0IDE2MC4zNDMzMTggMCAxMTggMTQ4LjcyNjEyMiI+PC9wb2x5Z29uPgogICAgICAgICAgICA8cG9seWdvbiBpZD0iU2hhcGUiIGZpbGw9IiMyMDVCOTkiIHBvaW50cz0iMTA2Ljg2MzgxMyAzMTIuNzY1OTU3IDY0IDI5MC4xNjk5MDcgNjQgMjIuNTk2MDUwNCAxMDYuODYzODEzIDAgMTQ1IDE1Ni4zODI5NzgiPjwvcG9seWdvbj4KICAgICAgICAgICAgPHJlY3QgaWQ9IlJlY3RhbmdsZS1wYXRoIiBmaWxsPSIjMkU3M0I4IiB4PSIxMDciIHk9IjAiIHdpZHRoPSI1MyIgaGVpZ2h0PSIzMTIuNzY1OTU3Ij48L3JlY3Q+CiAgICAgICAgPC9nPgogICAgPC9nPgo8L3N2Zz4="
}
}
13 changes: 9 additions & 4 deletions src/appmixer/aws/redshift/bundle.json
@@ -1,7 +1,12 @@
{
"name": "appmixer.aws.redshift",
"version": "1.0.0",
"changelog": [
"Initial version"
]
"version": "1.0.1",
"changelog": {
"1.0.0": [
"Initial version"
],
"1.0.1": [
"NewRow, UpdatedRow: query fields now ignores semicolon at the end of the query to avoid syntax error."
]
}
}
10 changes: 7 additions & 3 deletions src/appmixer/azuredocumentintelligence/bundle.json
@@ -1,9 +1,13 @@
{
"name": "appmixer.azuredocumentintelligence",
"version": "1.0.2",
"version": "2.0.0",
"changelog": {
"1.0.2": [
"Initial version."
]
"Initial version."
],
"2.0.0": [
"(breaking change) ClassifyDocument split into ClassifyDocument and ClassifyDocumentFromStream. ClassifyDocumentFromStream is using only the File ID parameter, ClassifyDocument is using either File URL or Base64Source.",
"ClassifyDocument and ClassifyDocumentFromStream: added supported file extensions to the tooltip of either File ID or File URL."
]
}
}
@@ -8,30 +8,19 @@ module.exports = {
async receive(context) {

const { endpoint, apiKey } = context.config;
const { classifierId, fileId, fileUrl, base64Source } = context.messages.in.content;
const { classifierId, fileUrl, base64Source } = context.messages.in.content;

const client = DocumentIntelligence(endpoint, { key: apiKey });

let options;

if (fileId) {
// Using "Classify Document From Stream" operation from "Document Classifiers" API
const fileInfo = await context.getFileInfo(fileId);
const fileStream = await context.getFileReadStream(fileId);
options = {
contentType: fileInfo.contentType || 'application/octet-stream',
body: fileStream
};
if (fileUrl) {
options = { body: { urlSource: fileUrl } };
} else {
// Using "Classify Document" operation from "Document Classifiers" API
if (fileUrl) {
options = { body: { urlSource: fileUrl } };
} else {
options = { body: { base64Source } };
}
options = { body: { base64Source } };
}

await context.log({ step: 'Classifying document', endpoint, classifierId, fileId, fileUrl, base64Source });
await context.log({ step: 'Classifying document', endpoint, classifierId, fileUrl, base64Source });
const initialResponse = await client
.path('/documentClassifiers/{classifierId}:analyze', classifierId)
.post(options);
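Because this hunk interleaves the removed stream branch with the kept URL/Base64 branch, here is how the simplified section of ClassifyDocument reads after the change, reconstructed from the lines above with nothing new added:

    // Post-change branch: ClassifyDocument handles only URL and Base64 inputs;
    // the Appmixer file-stream path moved to the new ClassifyDocumentFromStream component.
    let options;
    if (fileUrl) {
        options = { body: { urlSource: fileUrl } };
    } else {
        options = { body: { base64Source } };
    }

    await context.log({ step: 'Classifying document', endpoint, classifierId, fileUrl, base64Source });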
@@ -1,7 +1,7 @@
{
"name": "appmixer.azuredocumentintelligence.classifiers.ClassifyDocument",
"author": "Appmixer <[email protected]>",
"description": "Classifies document with document classifier. Use one of the following input options: <ul><li>1. <b>Appmixer File ID</b> of the document to classify.</li> <li>2. <b>URL</b> of the document to classify.</li> <li>3. <b>Base64</b> encoded source of the document to classify.</li>",
"description": "Classifies document with document classifier. Use one of the following input options: <ul><li>1. <b>URL</b> of the document to classify.</li> <li>2. <b>Base64</b> encoded source of the document to classify.</li>",
"label": "Classify Document",
"auth": {
"service": "appmixer:azuredocumentintelligence"
@@ -17,9 +17,6 @@
"classifierId": {
"type": "string"
},
"fileId": {
"type": "string"
},
"fileUrl": {
"type": "string"
},
@@ -28,7 +25,6 @@
}
},
"oneOf": [
{ "required": ["classifierId", "fileId"] },
{ "required": ["classifierId", "fileUrl"] },
{ "required": ["classifierId", "base64Source"] }
]
@@ -46,23 +42,17 @@
}
}
},
"fileId": {
"type": "filepicker",
"index": 2,
"label": "File ID",
"tooltip": "File ID of the document to classify."
},
"fileUrl": {
"type": "text",
"index": 3,
"index": 2,
"label": "File URL",
"tooltip": "URL of the document to classify."
"tooltip": "File URL of the document to classify. Supported file extensions: <strong>Documents</strong>( .pdf, .docx, .xlsx, .pptx ) <strong>Images</strong>( .jpeg, .png, .tiff, .bmp, .heif ) <strong>Web files</strong>( .html ) <strong>Binary files</strong>."
},
"base64Source": {
"type": "textarea",
"index": 4,
"index": 3,
"label": "Base64 Source",
"tooltip": "Base64 encoded source of the document to classify."
"tooltip": "Base64 encoded source of the document to classify. If <strong>File URL</strong> is provided, this field is ignored."
}
}
}
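For reference, message contents that satisfy the updated oneOf above: classifierId plus either fileUrl or base64Source (fileId is no longer accepted by this component; use ClassifyDocumentFromStream for that). The values below are placeholders:

    // Illustrative inputs only; the classifier ID, URL and Base64 payload are made up.
    const byUrl = {
        classifierId: 'my-classifier',
        fileUrl: 'https://example.com/invoice.pdf'
    };

    const byBase64 = {
        classifierId: 'my-classifier',
        base64Source: 'JVBERi0xLjcK...' // truncated Base64 of a PDF
    };

    // Per the updated tooltip, base64Source is ignored when a File URL is supplied,
    // which matches the fileUrl-first branch in ClassifyDocument.js.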
@@ -0,0 +1,36 @@
'use strict';

const DocumentIntelligence = require('@azure-rest/ai-document-intelligence').default;
const { isUnexpected, getLongRunningPoller } = require('@azure-rest/ai-document-intelligence');

module.exports = {

async receive(context) {

const { endpoint, apiKey } = context.config;
const { classifierId, fileId } = context.messages.in.content;

const client = DocumentIntelligence(endpoint, { key: apiKey });

const fileInfo = await context.getFileInfo(fileId);
const fileStream = await context.getFileReadStream(fileId);
const options = {
contentType: fileInfo.contentType || 'application/octet-stream',
body: fileStream
};

await context.log({ step: 'Classifying document', endpoint, classifierId, fileId });
const initialResponse = await client
.path('/documentClassifiers/{classifierId}:analyze', classifierId)
.post(options);

if (isUnexpected(initialResponse)) {
throw initialResponse.body.error;
}

const poller = await getLongRunningPoller(client, initialResponse);
const analyzeResult = (await poller.pollUntilDone()).body.analyzeResult;

await context.sendJson(analyzeResult, 'out');
}
};