Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Use transformer differently #48

Merged
merged 2 commits on Mar 29, 2018
Merged
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
131 changes: 66 additions & 65 deletions tap_salesforce/sync.py
Original file line number Diff line number Diff line change
Expand Up @@ -56,33 +56,33 @@ def resume_syncing_bulk_query(sf, catalog_entry, job_id, state, counter):
schema = catalog_entry['schema']

# Iterate over the remaining batches, removing them once they are synced
with Transformer(pre_hook=transform_bulk_data_hook) as transformer:
for batch_id in batch_ids[:]:
for rec in bulk.get_batch_results(job_id, batch_id, catalog_entry):
counter.increment()
for batch_id in batch_ids[:]:
for rec in bulk.get_batch_results(job_id, batch_id, catalog_entry):
counter.increment()
with Transformer(pre_hook=transform_bulk_data_hook) as transformer:
rec = transformer.transform(rec, schema)
rec = fix_record_anytype(rec, schema)
singer.write_message(
singer.RecordMessage(
stream=(
stream_alias or stream),
record=rec,
version=stream_version,
time_extracted=start_time))

# Update bookmark if necessary
replication_key_value = replication_key and singer_utils.strptime_with_tz(rec[replication_key])
if replication_key_value and replication_key_value <= start_time and replication_key_value > current_bookmark:
current_bookmark = singer_utils.strptime_with_tz(rec[replication_key])

state = singer.write_bookmark(state,
catalog_entry['tap_stream_id'],
'JobHighestBookmarkSeen',
singer_utils.strftime(current_bookmark))
batch_ids.remove(batch_id)
LOGGER.info("Finished syncing batch %s. Removing batch from state.", batch_id)
LOGGER.info("Batches to go: %d", len(batch_ids))
singer.write_state(state)
rec = fix_record_anytype(rec, schema)
singer.write_message(
singer.RecordMessage(
stream=(
stream_alias or stream),
record=rec,
version=stream_version,
time_extracted=start_time))

# Update bookmark if necessary
replication_key_value = replication_key and singer_utils.strptime_with_tz(rec[replication_key])
if replication_key_value and replication_key_value <= start_time and replication_key_value > current_bookmark:
current_bookmark = singer_utils.strptime_with_tz(rec[replication_key])

state = singer.write_bookmark(state,
catalog_entry['tap_stream_id'],
'JobHighestBookmarkSeen',
singer_utils.strftime(current_bookmark))
batch_ids.remove(batch_id)
LOGGER.info("Finished syncing batch %s. Removing batch from state.", batch_id)
LOGGER.info("Batches to go: %d", len(batch_ids))
singer.write_state(state)

return counter

Expand Down Expand Up @@ -116,56 +116,57 @@ def sync_records(sf, catalog_entry, state, counter):
start_time = singer_utils.now()

LOGGER.info('Syncing Salesforce data for stream %s', stream)
with Transformer(pre_hook=transform_bulk_data_hook) as transformer:
for rec in sf.query(catalog_entry, state):
counter.increment()

for rec in sf.query(catalog_entry, state):
counter.increment()
with Transformer(pre_hook=transform_bulk_data_hook) as transformer:
rec = transformer.transform(rec, schema)
rec = fix_record_anytype(rec, schema)
singer.write_message(
singer.RecordMessage(
stream=(
stream_alias or stream),
record=rec,
version=stream_version,
time_extracted=start_time))
rec = fix_record_anytype(rec, schema)
singer.write_message(
singer.RecordMessage(
stream=(
stream_alias or stream),
record=rec,
version=stream_version,
time_extracted=start_time))

replication_key_value = replication_key and singer_utils.strptime_with_tz(rec[replication_key])
replication_key_value = replication_key and singer_utils.strptime_with_tz(rec[replication_key])

if sf.pk_chunking:
if replication_key_value and replication_key_value <= start_time and replication_key_value > chunked_bookmark:
# Replace the highest seen bookmark and save the state in case we need to resume later
chunked_bookmark = singer_utils.strptime_with_tz(rec[replication_key])
state = singer.write_bookmark(
state,
catalog_entry['tap_stream_id'],
'JobHighestBookmarkSeen',
singer_utils.strftime(chunked_bookmark))
singer.write_state(state)
# Before writing a bookmark, make sure Salesforce has not given us a
# record with one outside our range
elif replication_key_value and replication_key_value <= start_time:
if sf.pk_chunking:
if replication_key_value and replication_key_value <= start_time and replication_key_value > chunked_bookmark:
# Replace the highest seen bookmark and save the state in case we need to resume later
chunked_bookmark = singer_utils.strptime_with_tz(rec[replication_key])
state = singer.write_bookmark(
state,
catalog_entry['tap_stream_id'],
replication_key,
rec[replication_key])
'JobHighestBookmarkSeen',
singer_utils.strftime(chunked_bookmark))
singer.write_state(state)

# Tables with no replication_key will send an
# activate_version message for the next sync
if not replication_key:
singer.write_message(activate_version_message)
state = singer.write_bookmark(
state, catalog_entry['tap_stream_id'], 'version', None)

# If pk_chunking is set, only write a bookmark at the end
if sf.pk_chunking:
# Write a bookmark with the highest value we've seen
# Before writing a bookmark, make sure Salesforce has not given us a
# record with one outside our range
elif replication_key_value and replication_key_value <= start_time:
state = singer.write_bookmark(
state,
catalog_entry['tap_stream_id'],
replication_key,
singer_utils.strptime(chunked_bookmark))
rec[replication_key])
singer.write_state(state)

# Tables with no replication_key will send an
# activate_version message for the next sync
if not replication_key:
singer.write_message(activate_version_message)
state = singer.write_bookmark(
state, catalog_entry['tap_stream_id'], 'version', None)

# If pk_chunking is set, only write a bookmark at the end
if sf.pk_chunking:
# Write a bookmark with the highest value we've seen
state = singer.write_bookmark(
state,
catalog_entry['tap_stream_id'],
replication_key,
singer_utils.strptime(chunked_bookmark))

def fix_record_anytype(rec, schema):
"""Modifies a record when the schema has no 'type' element due to a SF type of 'anyType.'
Expand Down