PersonioClientV1: paging fixes, workarounds and calendar damage repair #207

Merged · 1 commit · Oct 27, 2024
35 changes: 20 additions & 15 deletions lib/PersonioClientV1.js
@@ -81,32 +81,32 @@ class PersonioClientV1 extends UrlFetchJsonClient {
    * @return JSON document "data" member from Personio.
    */
   async getPersonioJson(url, options) {
+    const params = PersonioClientV1.parseQuery(url);
     let data = [];
-    let offset = null;
+    let offset = params.offset !== undefined ? +params.offset : null;
+    let limit = params.limit !== undefined ? +params.limit : null;
     do {
-      // we ensure only known Personio API endpoints can be contacted
-      let pathAndQuery = url;
-      if (offset != null || !url.includes('offset=')) {
-        offset = Math.floor(Math.max(offset, 0));
-        pathAndQuery += pathAndQuery.includes('?') ? '&offset=' + offset : '?offset=' + offset;
-      }
-      if (!url.includes('limit=')) {
-        pathAndQuery += pathAndQuery.includes('?') ? '&limit=' + PERSONIO_MAX_PAGE_SIZE : '?limit=' + PERSONIO_MAX_PAGE_SIZE;
+      if (offset != null || limit != null) {
+        offset = Math.floor(Math.max(+offset, 0));
+        limit = Math.floor(Math.max(+limit, PERSONIO_MAX_PAGE_SIZE));
+        params.offset = '' + offset;
+        params.limit = '' + limit;
       }

-      const document = await this.getJson(pathAndQuery, options);
+      const finalUrl = url.split('?')[0] + PersonioClientV1.buildQuery(params);
+      // we ensure only known Personio API endpoints can be contacted
+      const document = await this.getJson(finalUrl, options);
       if (!document || !document.success) {
         const message = (document && document.error && document.error.message) ? document.error.message : '';
-        throw new Error('Error response for ' + pathAndQuery + ' from Personio' + (message ? ': ' : '') + message);
+        throw new Error('Error response for ' + finalUrl + ' from Personio' + (message ? ': ' : '') + message);
       }

       if (!document.data) {
-        throw new Error('Response for ' + pathAndQuery + ' from Personio doesn\'t contain data');
+        throw new Error('Response for ' + finalUrl + ' from Personio doesn\'t contain data');
       }

       if (!Array.isArray(document.data)
-          || url.includes('limit=')
-          || url.includes('offset=')
+          || !limit
           || !document.limit) {
         data = document.data;
         break;
@@ -115,7 +115,12 @@ class PersonioClientV1 extends UrlFetchJsonClient {
       data = data.concat(document.data);

       // keep requesting remaining pages
-      offset = document.data.length < document.limit ? offset = null : data.length;
+      limit = document.limit;
+      offset = document.data.length < limit ? offset = null : data.length;
+      if (offset && url.includes('company/time-offs')) {
+        // special case: the time-offs endpoint's offset parameter is a page index (not an element index)
+        offset = Math.floor(offset / document.limit);
+      }
     }
     while (offset != null);
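For orientation, a minimal usage sketch of the reworked paging (not part of the diff; client construction mirrors SyncTimeOffs.js, and clientId/clientSecret as well as the query values are placeholders):

async function pagingSketch(clientId, clientSecret) {
  const personio = PersonioClientV1.withApiCredentials(clientId, clientSecret);

  // No offset/limit in the query: a single request, the first page is returned as-is.
  const firstPage = await personio.getPersonioJson('/company/employees');

  // offset/limit present: both are normalized (offset >= 0, limit raised to at least
  // PERSONIO_MAX_PAGE_SIZE), re-serialized via buildQuery(), and further pages are fetched
  // until a page shorter than the server-reported limit arrives. For /company/time-offs the
  // next offset is converted from an element index to a page index (200 elements at limit 200 -> offset 1).
  const allTimeOffs = await personio.getPersonioJson('/company/time-offs?offset=0&limit=200');

  Logger.log('Fetched %s employees (first page) and %s time-offs', '' + firstPage.length, '' + allTimeOffs.length);
}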
37 changes: 36 additions & 1 deletion lib/UrlFetchJsonClient.js
@@ -121,7 +121,7 @@ class UrlFetchJsonClient {
    * @return {string|string}
    */
   static buildQuery(params) {
-    const encodeQueryParam = (key, value) => encodeURIComponent(key) + '=' + encodeURIComponent(value);
+    const encodeQueryParam = (key, value) => encodeURIComponent(key) + (value !== undefined ? '=' + encodeURIComponent(value) : '');

     const query = Object.entries(params)
       .filter(([key, value]) => value !== undefined)
@@ -137,4 +137,39 @@

     return query.length > 0 ? '?' + query : '';
   };
+
+
+  /** Explode URL query components into an object.
+   *
+   * Expects a '?' prefix and ignores any fragment part.
+   *
+   * @param url URL whose query part to convert.
+   * @return {object}
+   */
+  static parseQuery(url) {
+    const encodeQueryParam = (key, value) => encodeURIComponent(key) + '=' + encodeURIComponent(value);
+
+    const urlParts = url.split('?');
+    if (urlParts.length < 2) {
+      return {};
+    }
+
+    const query = urlParts[1].split('#')[0];
+    return query.split('&').reduce((acc, part) => {
+      const parts = part.split('=');
+      const key = decodeURIComponent(parts[0]);
+      const value = parts[1] === undefined ? undefined : parts[1] === 'null' ? null : decodeURIComponent(parts[1]);
+      if (acc.hasOwnProperty(key)) {
+        const existingValue = acc[key];
+        if (!Array.isArray(existingValue)) {
+          acc[key] = [existingValue, value];
+        } else {
+          existingValue.push(value);
+        }
+      } else {
+        acc[key] = value;
+      }
+      return acc;
+    }, {});
+  };
 }
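A small sketch of the new parseQuery() helper and its interplay with buildQuery() (assumed input values, not part of the diff):

function querySketch() {
  // Repeated keys are collected into an array, a key without '=' parses to undefined,
  // and the literal value 'null' is mapped to null.
  const params = UrlFetchJsonClient.parseQuery('/company/time-offs?offset=0&limit=200&status=approved&status=pending&dry_run');
  // params ~= { offset: '0', limit: '200', status: ['approved', 'pending'], dry_run: undefined }

  // buildQuery() drops entries whose value is undefined and re-encodes the rest,
  // so a parse/build round trip yields an equivalent query string.
  const query = UrlFetchJsonClient.buildQuery({ offset: '0', limit: '200' });
  Logger.log('Rebuilt query: %s', query); // expected: '?offset=0&limit=200'
}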
147 changes: 147 additions & 0 deletions sync-timeoffs/SyncTimeOffs.js
@@ -194,6 +194,153 @@ async function syncTimeOffs() {
if (firstError) {
throw firstError;
}

return true;
}


/** Example of how to use the recoverEvents_() function.
*
*/
async function recoverEventsExample() {
const updateDateMin = new Date('Oct 21, 2024, 6:34:35 PM');
const updateDateMax = new Date('Oct 21, 2024, 10:50:50 PM');
const specs = [
['[email protected]', "Example P32 ⇵"],
['[email protected]', "Out - Concert ⇵"],
['[email protected]', "Vacation: ⇵"],
// ...
];

for (const [email, title] of specs) {
if (!await recoverEvents_(title, email, updateDateMin, updateDateMax)) {
break;
}
}
}

/** Utility function to recover accidentally cancelled events.
*
* @note This is a destructive operation, USE WITH UTMOST CARE!
*
* @param title The event title (only cancelled events whose title exactly matches this string are recovered).
* @param email (optional) The email of the user whose events are to be recovered.
* @param updateDateMin (optional) Minimum last update date.
* @param updateDateMax (optional) Maximum last update date.
*/
async function recoverEvents_(title, email, updateDateMin, updateDateMax) {

const scriptLock = LockService.getScriptLock();
if (!scriptLock.tryLock(5000)) {
throw new Error('Failed to acquire lock. Only one instance of this script can run at any given time!');
}

const allowedDomains = (getScriptProperties_().getProperty(ALLOWED_DOMAINS_KEY) || '')
.split(',')
.map(d => d.trim());

const emailWhiteList = getEmailWhiteList_();
if (email) {
emailWhiteList.push(email);
}
const isEmailAllowed = email => (!emailWhiteList.length || emailWhiteList.includes(email))
&& allowedDomains.includes(email.substring(email.lastIndexOf('@') + 1));

Logger.log('Configured to handle accounts %s on domains %s', emailWhiteList.length ? emailWhiteList : '', allowedDomains);

// all timing related activities are relative to this EPOCH
const epoch = new Date();

// how far back to sync events/time-offs
const lookbackMillies = -Math.round(getLookbackDays_() * 24 * 60 * 60 * 1000);
// how far into the future to sync events/time-offs
const lookaheadMillies = Math.round(getLookaheadDays_() * 24 * 60 * 60 * 1000);

// after how many milliseconds should this script stop by itself (to avoid forced termination/unclean state)?
// 4:50 minutes (hard AppsScript kill comes at 6:00 minutes)
// stay under 5 min. to ensure termination before the next instance starts if operating at a 5 min. job delay
const maxRuntimeMillies = Math.round(290 * 1000);

const fetchTimeMin = Util.addDateMillies(new Date(epoch), lookbackMillies);
fetchTimeMin.setUTCHours(0, 0, 0, 0); // round down to start of day
const fetchTimeMax = Util.addDateMillies(new Date(epoch), lookaheadMillies);
fetchTimeMax.setUTCHours(24, 0, 0, 0); // round up to end of day

const personioCreds = getPersonioCreds_();
const personio = PersonioClientV1.withApiCredentials(personioCreds.clientId, personioCreds.clientSecret);

// load and prepare list of employees to process
const employees = (await personio.getPersonioJson('/company/employees')).filter(employee =>
employee.attributes.status.value !== 'inactive' && isEmailAllowed(employee.attributes.email.value)
);
Util.shuffleArray(employees);

Logger.log('Recovering events with title containing "%s" between %s and %s for %s accounts', title, fetchTimeMin.toISOString(), fetchTimeMax.toISOString(), '' + employees.length);

let firstError = null;
let processedCount = 0;
for (const employee of employees) {

const email = employee.attributes.email.value;
const employeeId = employee.attributes.id.value;

// we keep operating if handling calendar of a single user fails
try {
const calendar = await CalendarClient.withImpersonatingService(getServiceAccountCredentials_(), email);

// test against the deadline first
const deadlineTs = +epoch + maxRuntimeMillies;
let now = Date.now();
if (now >= deadlineTs) {
return false;
}

const timeOffs = await queryPersonioTimeOffs_(personio, fetchTimeMin, fetchTimeMax, employeeId);
now = Date.now();
if (now >= deadlineTs) {
return false;
}

const allEvents = await queryCalendarEvents_(calendar, 'primary', fetchTimeMin, fetchTimeMax);
for (const event of allEvents) {

if ((updateDateMin && new Date(event.updated) < updateDateMin) || (updateDateMax && new Date(event.updated) > updateDateMax)) {
continue;
}

const timeOffId = +event.extendedProperties?.private?.timeOffId;
if (timeOffId && event.status === 'cancelled' && (event.summary || '') === title) {
let now = Date.now();
if (now >= deadlineTs) {
break;
}

if (!timeOffs.hasOwnProperty(timeOffId)) {
setEventPrivateProperty_(event, 'timeOffId', undefined);
}

event.status = 'confirmed';
await calendar.update('primary', event.id, event);
Logger.log('Recovered event "%s" at %s for user %s', event.summary, event.start.dateTime || event.start.date, email);
}
}
} catch (e) {
Logger.log('Failed to recover matching out-of-offices of user %s: %s', email, e);
firstError = firstError || e;
}
++processedCount;
}

Logger.log('Completed event recovery for %s of %s accounts', '' + processedCount, '' + employees.length);

// for completeness, also automatically released at exit
scriptLock.releaseLock();

if (firstError) {
throw firstError;
}

return true;
}

