[WIP] Temp cli updates #369

Open · wants to merge 5 commits into base: main
10 changes: 5 additions & 5 deletions .pnp.cjs

Some generated files are not rendered by default.

Binary file not shown.
2 changes: 1 addition & 1 deletion package.json
@@ -67,7 +67,7 @@
     "@transcend-io/handlebars-utils": "^1.1.0",
     "@transcend-io/internationalization": "^1.6.0",
     "@transcend-io/persisted-state": "^1.0.4",
-    "@transcend-io/privacy-types": "^4.98.0",
+    "@transcend-io/privacy-types": "^4.101.0",
     "@transcend-io/secret-value": "^1.2.0",
     "@transcend-io/type-utils": "^1.5.0",
     "bluebird": "^3.7.2",
169 changes: 169 additions & 0 deletions src/dedupe.ts
@@ -0,0 +1,169 @@
import { readCsv } from './requests';
import groupBy from 'lodash/groupBy';
import * as t from 'io-ts';
import { logger } from './logger';
import { writeCsv } from './cron';

// yarn ts-node --transpile-only ./src/dedupe.ts

// Variables
const DUPLICATE_FILE =
  '/Users/michaelfarrell/Desktop/deputy/duplicate_requests.csv';
const DUPLICATE_OUT_FILE =
  '/Users/michaelfarrell/Desktop/deputy/duplicate_out_requests.csv';

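// A row of the duplicate-requests CSV export; `preferences` is a JSON-encoded string
// of boolean flags (MarketingEmails, ProductGuidance, ProductInsider, ProductUpdates)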
const DuplicateData = t.type({
  id: t.string,
  createdAt: t.string,
  coreIdentifierValue: t.string,
  type: t.string,
  name: t.union([t.string, t.null]),
  trackingType: t.string,
  preferences: t.string,
});

/** Override type */
type DuplicateData = t.TypeOf<typeof DuplicateData>;

/**
 * Get duplicate data
 *
 * @returns The data
 */
function getDuplicateData(): (DuplicateData & {
  /** MarketingEmails preference */
  MarketingEmails?: boolean;
  /** ProductGuidance preference */
  ProductGuidance?: boolean;
  /** ProductInsider preference */
  ProductInsider?: boolean;
  /** ProductUpdates preference */
  ProductUpdates?: boolean;
})[] {
  // Read in the duplicate data and spread the parsed preference flags onto each row
  let duplicateData = readCsv(DUPLICATE_FILE, DuplicateData).map(
    ({ preferences, ...d }) => ({
      ...d,
      ...JSON.parse(preferences),
    }),
  );
  // Total rows
  logger.info(`Number of rows: ${duplicateData.length}`);

  // Filter out deputy emails
  duplicateData = duplicateData.filter(
    (x) => !x.coreIdentifierValue.includes('@deputy.com'),
  );
  logger.info(
    `Number of rows after filtering out deputy emails: ${duplicateData.length}`,
  );

  return duplicateData;
}

/**
 * Run the file parsing
 */
function runtest(): void {
  // Parse the duplicate request data from the CSV
  const duplicateData = getDuplicateData();

  // Group by coreIdentifierValue
  const grouped = groupBy(duplicateData, 'coreIdentifierValue');
  logger.info(`Number of unique users: ${Object.values(grouped).length}`);

  const metadata = Object.entries(grouped)
    .map(([key, value]) => ({
      key,
      sorted: value.sort(
        (a, b) =>
          new Date(b.createdAt).getTime() - new Date(a.createdAt).getTime(),
      ),
      count: value.length,
      countByType: Object.entries(
        groupBy(
          value,
          ({
            type,
            MarketingEmails,
            ProductGuidance,
            ProductInsider,
            name,
            ProductUpdates,
          }) =>
            JSON.stringify({
              type,
              name,
              ProductGuidance,
              ProductInsider,
              ProductUpdates,
              MarketingEmails,
            }),
        ),
      ).map(([type, typeValue]) => ({
        ...JSON.parse(type),
        count: typeValue.length,
      })),
    }))
    .map((x) => ({
      ...x,
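      // Number of times the request type changes between consecutive entries
      // (entries are sorted newest-first above)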
      numLoop: x.sorted.reduce(
        (acc, curr, i) =>
          i === 0 ? acc : x.sorted[i - 1].type !== curr.type ? acc + 1 : acc,
        0,
      ),
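      // Number of times the MarketingEmails flag flips between consecutive entries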
      numMarketo: x.sorted.reduce(
        (acc, curr, i) =>
          i === 0
            ? acc
            : x.sorted[i - 1].MarketingEmails !== curr.MarketingEmails
            ? acc + 1
            : acc,
        0,
      ),
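      // Whether the most recent entry is an opt-out: an explicit OPT_OUT type,
      // or every preference flag set to false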
      lastIsOptedOut:
        x.sorted[0].type.includes('OPT_OUT') ||
        (x.sorted[0].ProductGuidance === false &&
          x.sorted[0].ProductInsider === false &&
          x.sorted[0].ProductUpdates === false &&
          x.sorted[0].MarketingEmails === false),
    }))
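    // Keep users whose latest entry is an opt-out, or whose type or MarketingEmails
    // value flip-flopped repeatedly across duplicate requests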
    .filter((x) => x.lastIsOptedOut || x.numLoop > 2 || x.numMarketo > 4)
    .map((x) => ({
      coreIdentifierValue: x.key,
      timestamp: new Date().toISOString(),
      ...(x.lastIsOptedOut || x.numLoop > 2
        ? {
            Marketing: false,
            ProductGuidance: false,
            ProductInsider: false,
            ProductUpdates: false,
            MarketingEmails: false,
          }
        : {
            Marketing:
              x.sorted[0].ProductGuidance ||
              x.sorted[0].ProductInsider ||
              x.sorted[0].ProductUpdates,
            ProductGuidance: x.sorted[0].ProductGuidance,
            ProductInsider: x.sorted[0].ProductInsider,
            ProductUpdates: x.sorted[0].ProductUpdates,
            MarketingEmails: false,
          }),
    }));

  logger.info(
    `Number of users to opt out: ${
      metadata.filter((x) => x.Marketing === false).length
    }`,
  );

  logger.info(
    `Number of users to opt out of MarketingEmails only: ${
      metadata.filter((x) => x.MarketingEmails === false).length
    }`,
  );

  writeCsv(DUPLICATE_OUT_FILE, metadata);
}
runtest();
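For reference, a minimal sketch of the row shape this script consumes and the row it would emit for a single user whose newest entry is an opt-out. Field names come from the DuplicateData codec and the mapping above; every value is invented for illustration.

// Illustrative only: one row of DUPLICATE_FILE before parsing (values are made up)
const sampleRow = {
  id: 'request-1',
  createdAt: '2024-01-02T00:00:00.000Z',
  coreIdentifierValue: 'user@example.com',
  type: 'OPT_OUT',
  name: null,
  trackingType: 'email',
  preferences:
    '{"MarketingEmails":false,"ProductGuidance":false,"ProductInsider":false,"ProductUpdates":false}',
};

// Because the newest entry for this user is an OPT_OUT, lastIsOptedOut is true and
// runtest() would write a row like this to DUPLICATE_OUT_FILE:
const sampleOutput = {
  coreIdentifierValue: 'user@example.com',
  timestamp: '2024-06-01T00:00:00.000Z', // new Date().toISOString() at run time
  Marketing: false,
  ProductGuidance: false,
  ProductInsider: false,
  ProductUpdates: false,
  MarketingEmails: false,
};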
2 changes: 1 addition & 1 deletion src/preference-management/parsePreferenceManagementCsv.ts
@@ -164,7 +164,7 @@ export async function parsePreferenceManagementCsvWithCache(
 
   // Read in the file
   fileMetadata[file] = currentState;
-  await cache.setValue(fileMetadata, 'fileMetadata');
+  // await cache.setValue(fileMetadata, 'fileMetadata'); FIXME
   const t1 = new Date().getTime();
   logger.info(
     colors.green(
@@ -155,7 +155,11 @@ export async function uploadPreferenceManagementPreferencesInteractive({
       const timestamp =
         metadata.timestampColum === NONE_PREFERENCE_MAP
           ? new Date()
-          : new Date(update[metadata.timestampColum!]);
+          : new Date(
+              new Date(update[metadata.timestampColum!]).getTime(),
+              // / -
+              // 11 * 60 * 60 * 1000, // FIXME
+            );
 
       // Determine updates
       const updates = getPreferenceUpdatesFromRow({
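If re-enabled, the disabled arithmetic in the hunk above appears to subtract eleven hours from each parsed timestamp before uploading; a minimal sketch of that offset, using an invented sample value:

// Sketch only: what the FIXME offset would compute if the commented-out lines were restored
const ELEVEN_HOURS_MS = 11 * 60 * 60 * 1000;
const raw = '2024-01-02T11:00:00.000Z'; // stands in for update[metadata.timestampColum!]
const shifted = new Date(new Date(raw).getTime() - ELEVEN_HOURS_MS);
console.log(shifted.toISOString()); // 2024-01-02T00:00:00.000Z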
@@ -179,8 +183,9 @@
         })),
       };
     });
-    await preferenceState.setValue(pendingUpdates, 'pendingUpdates');
-    await preferenceState.setValue({}, 'failingUpdates');
+    // FIXME out of memory
+    // await preferenceState.setValue(pendingUpdates, 'pendingUpdates');
+    // await preferenceState.setValue({}, 'failingUpdates');
 
     // Exit early if dry run
     if (dryRun) {
@@ -255,7 +260,7 @@ export async function uploadPreferenceManagementPreferencesInteractive({
           error: err?.response?.body || err?.message || 'Unknown error',
         };
       });
-      await preferenceState.setValue(failingUpdates, 'failingUpdates');
+      // await preferenceState.setValue(failingUpdates, 'failingUpdates'); FIXME
     }
 
     total += currentChunk.length;