diff --git a/Makefile b/Makefile
index 61bc422..f004507 100644
--- a/Makefile
+++ b/Makefile
@@ -1,8 +1,62 @@
-PLUGIN_VERSION=1.2.0
-PLUGIN_ID=salesforce
-
-plugin:
-	cat plugin.json|json_pp > /dev/null
-	rm -rf dist
-	mkdir dist
-	zip --exclude "*.pyc" -r dist/dss-plugin-${PLUGIN_ID}-${PLUGIN_VERSION}.zip custom-recipes parameter-sets python-connectors python-lib plugin.json
+# Makefile variables set automatically
+plugin_id=`cat plugin.json | python -c "import sys, json; print(str(json.load(sys.stdin)['id']).replace('/',''))"`
+plugin_version=`cat plugin.json | python -c "import sys, json; print(str(json.load(sys.stdin)['version']).replace('/',''))"`
+archive_file_name="dss-plugin-${plugin_id}-${plugin_version}.zip"
+remote_url=`git config --get remote.origin.url`
+last_commit_id=`git rev-parse HEAD`
+
+.DEFAULT_GOAL := plugin
+
+plugin: dist-clean
+	@echo "[START] Archiving plugin to dist/ folder..."
+	@cat plugin.json | json_pp > /dev/null
+	@mkdir dist
+	@echo "{\"remote_url\":\"${remote_url}\",\"last_commit_id\":\"${last_commit_id}\"}" > release_info.json
+	@git archive -v -9 --format zip -o dist/${archive_file_name} HEAD
+	@if [[ -d tests ]]; then \
+		zip --delete dist/${archive_file_name} "tests/*"; \
+	fi
+	@zip -u dist/${archive_file_name} release_info.json
+	@rm release_info.json
+	@echo "[SUCCESS] Archiving plugin to dist/ folder: Done!"
+
+dev: dist-clean
+	@echo "[START] Archiving plugin to dist/ folder... (dev mode)"
+	@cat plugin.json | json_pp > /dev/null
+	@mkdir dist
+	@zip -v -9 dist/${archive_file_name} -r . --exclude "tests/*" "env/*" ".git/*" ".pytest_cache/*" ".idea/*" "dist/*"
+	@echo "[SUCCESS] Archiving plugin to dist/ folder: Done!"
+
+unit-tests:
+	@echo "Running unit tests..."
+	@( \
+		PYTHON_VERSION=`python3 -c "import sys; print('PYTHON{}{}'.format(sys.version_info.major, sys.version_info.minor))"`; \
+		PYTHON_VERSION_IS_CORRECT=`cat code-env/python/desc.json | python3 -c "import sys, json; print('$$PYTHON_VERSION' in json.load(sys.stdin)['acceptedPythonInterpreters']);"`; \
+		if [ $$PYTHON_VERSION_IS_CORRECT == "False" ]; then echo "Python version $$PYTHON_VERSION is not in acceptedPythonInterpreters"; exit 1; else echo "Python version $$PYTHON_VERSION is in acceptedPythonInterpreters"; fi; \
+	)
+	@( \
+		rm -rf ./env/; \
+		python3 -m venv env/; \
+		source env/bin/activate; \
+		pip install --upgrade pip;\
+		pip install --no-cache-dir -r tests/python/unit/requirements.txt; \
+		pip install --no-cache-dir -r code-env/python/spec/requirements.txt; \
+		export PYTHONPATH="$(PYTHONPATH):$(PWD)/python-lib"; \
+		pytest tests/python/unit --alluredir=tests/allure_report || ret=$$?; exit $$ret \
+	)
+
+integration-tests:
+	@echo "Running integration tests..."
+	@( \
+		rm -rf ./env/; \
+		python3 -m venv env/; \
+		source env/bin/activate; \
+		pip3 install --upgrade pip;\
+		pip install --no-cache-dir -r tests/python/integration/requirements.txt; \
+		pytest tests/python/integration --alluredir=tests/allure_report || ret=$$?; exit $$ret \
+	)
+
+tests: unit-tests integration-tests
+
+dist-clean:
+	rm -rf dist
\ No newline at end of file
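For readers checking what the new Makefile variables evaluate to: the inline `python -c` one-liners at the top reduce to the following standalone sketch (reads plugin.json from the repository root; the script itself is illustrative, not part of the change):

    # Equivalent of the Makefile's plugin_id / plugin_version /
    # archive_file_name variables, written out as a plain script.
    import json

    with open("plugin.json") as f:
        meta = json.load(f)

    plugin_id = str(meta["id"]).replace("/", "")            # e.g. "salesforce"
    plugin_version = str(meta["version"]).replace("/", "")  # e.g. "1.2.2"
    print("dss-plugin-{}-{}.zip".format(plugin_id, plugin_version))
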
diff --git a/README.md b/README.md
index 12f9bff..6c208bd 100644
--- a/README.md
+++ b/README.md
@@ -6,6 +6,10 @@ Documentation: https://www.dataiku.com/product/plugins/salesforce/
 
 ### Changelog
 
+**Version 1.2.2 (2024-05-16)**
+
+* New: Pagination for 2k+ records on Salesforce report dataset
+
 **Version 1.2.1 (2020-12-03)**
 
 * Fixed: Allow empty security token
diff --git a/parameter-sets/oauth-credentials/parameter-set.json b/parameter-sets/oauth-credentials/parameter-set.json
index a5bdad3..3e43879 100644
--- a/parameter-sets/oauth-credentials/parameter-set.json
+++ b/parameter-sets/oauth-credentials/parameter-set.json
@@ -4,8 +4,8 @@
         "description": "Define a preset that DSS users can use to authenticate themselves on Salesforce. The plugin will work with per-user credentials.",
         "icon": "icon-cloud"
     },
-    "defaultDefinableInline": true,
-    "defaultDefinableAtProjectLevel": true,
+    "defaultDefinableInline": false,
+    "defaultDefinableAtProjectLevel": false,
     "pluginParams": [
     ],
diff --git a/plugin.json b/plugin.json
index 4fd3ad7..38c2ab4 100644
--- a/plugin.json
+++ b/plugin.json
@@ -1,6 +1,6 @@
 {
     "id": "salesforce",
-    "version": "1.2.1",
+    "version": "1.2.2",
     "meta": {
         "label": "Salesforce",
         "description": "Fetch data from Salesforce",
diff --git a/python-connectors/salesforce-report/connector.py b/python-connectors/salesforce-report/connector.py
index 930e28f..075a538 100644
--- a/python-connectors/salesforce-report/connector.py
+++ b/python-connectors/salesforce-report/connector.py
@@ -29,30 +29,38 @@ def generate_rows(self, dataset_schema=None, dataset_partitioning=None,
                       partition_id=None, records_limit=-1):
 
         results = self.client.make_api_call("/services/data/v39.0/analytics/reports/%s" % self.REPORT, parameters={"includeDetails": True})
-
-        report_format = results.get("reportMetadata").get("reportFormat")
-
-        if report_format != "TABULAR":
-            raise Exception("The format of the report is %s but the plugin only supports TABULAR." % report_format)
-
-        columns = results.get("reportMetadata").get("detailColumns", [])
-        log(columns)
-
-        log("records_limit: %i" % records_limit)
-
+        results_to_process = True
         n = 0
-
-        for obj in results.get("factMap").get("T!T", {}).get("rows", []):
-            arr = obj.get("dataCells", {})
-            if self.RESULT_FORMAT == 'json':
-                els = {}
-                for c, o in zip(columns, arr):
-                    els[c] = o
-                row = {"json": json.dumps(els)}
-            else:
-                row = {}
-                for c, o in zip(columns, arr):
-                    row[c] = o["label"]
-            n = n + 1
+        while results_to_process:
+            results_to_process = False
+            report_format = results.get("reportMetadata").get("reportFormat")
+
+            if report_format != "TABULAR":
+                raise Exception("The format of the report is %s but the plugin only supports TABULAR." % report_format)
+
+            columns = results.get("reportMetadata").get("detailColumns", [])
+            log(columns)
+
+            log("records_limit: %i" % records_limit)
+
+            for obj in results.get("factMap").get("T!T", {}).get("rows", []):
+                arr = obj.get("dataCells", {})
+                if self.RESULT_FORMAT == 'json':
+                    els = {}
+                    for c, o in zip(columns, arr):
+                        els[c] = o
+                    row = {"json": json.dumps(els)}
+                else:
+                    row = {}
+                    for c, o in zip(columns, arr):
+                        row[c] = o["label"]
+                n = n + 1
+                if records_limit < 0 or n <= records_limit:
+                    yield row
+
+            next_records_url = results.get('nextRecordsUrl', None)
             if records_limit < 0 or n <= records_limit:
-                yield row
+                if next_records_url:
+                    results = self.client.make_api_call(next_records_url)
+                    if results:
+                        results_to_process = True
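
Taken together, the connector change is a follow-the-cursor loop: Salesforce returns one page of detail rows per call (the "2k+ records" case in the changelog) and exposes the next page through nextRecordsUrl. A minimal standalone sketch of that pattern, with a hypothetical fetch_all_rows helper, assuming client.make_api_call returns the parsed JSON response as in the diff above:

    # Illustrative sketch only -- not the plugin's code. Field names
    # ("factMap", "T!T", "rows", "nextRecordsUrl") match the diff above.
    def fetch_all_rows(client, report_id, records_limit=-1):
        results = client.make_api_call(
            "/services/data/v39.0/analytics/reports/%s" % report_id,
            parameters={"includeDetails": True})
        n = 0
        while results:
            for obj in results.get("factMap", {}).get("T!T", {}).get("rows", []):
                n += 1
                if records_limit < 0 or n <= records_limit:
                    yield obj
            # Follow the pagination cursor until Salesforce stops returning
            # one, or the caller's record limit has been reached.
            next_records_url = results.get("nextRecordsUrl")
            if next_records_url and (records_limit < 0 or n <= records_limit):
                results = client.make_api_call(next_records_url)
            else:
                results = None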