AWS DMS - PostgreSQL invalid input value for enum and daterange - postgresql
I'm performing a logical replication from our PostgreSQL server to AWS RDS, using the database migration tool (DMS). Most tables were migrated successfully, but I'm having trouble with a couple of the tables.
When I run the task to load the table, I get this error on the logs of RDS:
ERROR: invalid input value for enum property_type: ""
CONTEXT: unnamed portal parameter $17 = ''
STATEMENT: INSERT INTO "public"."loans"("id","account_id","loan_number","created_at","updated_at","folio","mers_min","mers_status","mers_status_date","application_number","servicer","servicer_loan_number","status","primary_borrower_last_name","primary_borrower_first_name","property_number_of_units","property_type","property_address_line1","property_address_line2","property_city","property_state","property_zip","property_county_code","property_country_code","property_census_tract_code","property_parcel_id","mortgage_type","qm_loan","purpose","ltv","amortization_type","amount","interest_rate","term","lien_priority","application_date","approval_date","rejected_date","closing_date","funding_date","purchase_date","source","officer","processor","underwriter","appraiser","property_usage","fha_case_number","var_payload","approval_type","approval_message","housing_expense_ratio","total_debt_expense_ratio","subordinate_financing_amount","combined_ltv","property_appraised_value","property_purchase_price","property_appraised_date","property_year_built","credit_score","au_type","au_recommendation","lender_product","heloc_indicator","reverse_indicator","property_pud_indicator","closer","additional_financing_amount","rejected_reason","mortgage_insurance_certificate_number","mortgage_insurance_coverage_amount","mortgage_insurance_premium","day_one_certainty","first_payment_date","maturity_date","principal_and_interest_payment_amount","is_portfolio","financing_concessions_amount","sales_concessions_amount","transaction_costs_amount","is_investment_quality","application_received_date","lender_program","refinance_cash_out_type") values ($1,$2,$3,$4,$5,$6,$7,$8,$9,$10,$11,$12,$13,$14,$15,$16,$17,$18,$19,$20,$21,$22,$23,$24,$25,$26,$27,$28,$29,$30,$31,$32,$33,$34,$35,$36,$37,$38,$39,$40,$41,$42,$43,$44,$45,$46,$47,$48,$49,$50,$51,$52,$53,$54,$55,$56,$57,$58,$59,$60,$61,$62,$63,$64,$65,$66,$67,$68,$69,$70,$71,$72,$73,$74,$75,$76,$77,$78,$79,$80,$81,$82,$83,$84)
The type of column property_type is a nullable enum that takes as values Condominium, Cooperative, ManufacturedHome, SingleFamily, Townhouse and TwoToFourFamily.
I'm experiencing a similar issue with another table from the same database, where I get:
ERROR: malformed range literal: ""
DETAIL: Missing left parenthesis or bracket.
CONTEXT: unnamed portal parameter $3 = ''
STATEMENT: INSERT INTO "public"."selections_sampling_data"("id","sow_id","period","loan_number","field_data","selected_on","substituted_on","substitution_for_id","received_for_review_on","created_at","updated_at","selected_for","selection_reason","selections_sampling_strategy_id") values ($1,$2,$3,$4,$5,$6,$7,$8,$9,$10,$11,$12,$13,$14)
This one doesn't explicitly tell me which column generated the error, but parameter $3 should correspond with the period column, which is a daterange type.
In both cases, the DMS task seems to be querying for an empty string as value, which is not allowed by those data types.
I don't know if this is an error with my schema, or how I configured the DMS task.
[Update]
My task configuration is the following:
{
"StreamBufferSettings": {
"StreamBufferCount": 3,
"CtrlStreamBufferSizeInMB": 5,
"StreamBufferSizeInMB": 8
},
"ErrorBehavior": {
"FailOnNoTablesCaptured": true,
"ApplyErrorUpdatePolicy": "LOG_ERROR",
"FailOnTransactionConsistencyBreached": false,
"RecoverableErrorThrottlingMax": 1800,
"DataErrorEscalationPolicy": "SUSPEND_TABLE",
"ApplyErrorEscalationCount": 0,
"RecoverableErrorStopRetryAfterThrottlingMax": true,
"RecoverableErrorThrottling": true,
"ApplyErrorFailOnTruncationDdl": false,
"DataTruncationErrorPolicy": "LOG_ERROR",
"ApplyErrorInsertPolicy": "LOG_ERROR",
"EventErrorPolicy": "IGNORE",
"ApplyErrorEscalationPolicy": "LOG_ERROR",
"RecoverableErrorCount": -1,
"DataErrorEscalationCount": 0,
"TableErrorEscalationPolicy": "STOP_TASK",
"RecoverableErrorInterval": 5,
"ApplyErrorDeletePolicy": "IGNORE_RECORD",
"TableErrorEscalationCount": 0,
"FullLoadIgnoreConflicts": true,
"DataErrorPolicy": "LOG_ERROR",
"TableErrorPolicy": "SUSPEND_TABLE"
},
"ValidationSettings": {
"ValidationPartialLobSize": 0,
"PartitionSize": 10000,
"RecordFailureDelayLimitInMinutes": 0,
"SkipLobColumns": false,
"FailureMaxCount": 10000,
"HandleCollationDiff": false,
"ValidationQueryCdcDelaySeconds": 0,
"ValidationMode": "ROW_LEVEL",
"TableFailureMaxCount": 1000,
"RecordFailureDelayInMinutes": 5,
"MaxKeyColumnSize": 8096,
"EnableValidation": true,
"ThreadCount": 5,
"RecordSuspendDelayInMinutes": 30,
"ValidationOnly": false
},
"TTSettings": {
"TTS3Settings": null,
"TTRecordSettings": null,
"EnableTT": false
},
"FullLoadSettings": {
"CommitRate": 1000,
"StopTaskCachedChangesApplied": false,
"StopTaskCachedChangesNotApplied": false,
"MaxFullLoadSubTasks": 2,
"TransactionConsistencyTimeout": 600,
"CreatePkAfterFullLoad": false,
"TargetTablePrepMode": "DO_NOTHING"
},
"TargetMetadata": {
"ParallelApplyBufferSize": 0,
"ParallelApplyQueuesPerThread": 0,
"ParallelApplyThreads": 0,
"TargetSchema": "",
"InlineLobMaxSize": 0,
"ParallelLoadQueuesPerThread": 0,
"SupportLobs": true,
"LobChunkSize": 64,
"TaskRecoveryTableEnabled": false,
"ParallelLoadThreads": 0,
"LobMaxSize": 0,
"BatchApplyEnabled": true,
"FullLobMode": true,
"LimitedSizeLobMode": false,
"LoadMaxFileSize": 0,
"ParallelLoadBufferSize": 0
},
"BeforeImageSettings": null,
"ControlTablesSettings": {
"historyTimeslotInMinutes": 5,
"HistoryTimeslotInMinutes": 5,
"StatusTableEnabled": false,
"SuspendedTablesTableEnabled": false,
"HistoryTableEnabled": false,
"ControlSchema": "",
"FullLoadExceptionTableEnabled": false
},
"LoopbackPreventionSettings": null,
"CharacterSetSettings": null,
"FailTaskWhenCleanTaskResourceFailed": false,
"ChangeProcessingTuning": {
"StatementCacheSize": 50,
"CommitTimeout": 1,
"BatchApplyPreserveTransaction": true,
"BatchApplyTimeoutMin": 1,
"BatchSplitSize": 0,
"BatchApplyTimeoutMax": 30,
"MinTransactionSize": 1000,
"MemoryKeepTime": 60,
"BatchApplyMemoryLimit": 500,
"MemoryLimitTotal": 1024
},
"ChangeProcessingDdlHandlingPolicy": {
"HandleSourceTableDropped": true,
"HandleSourceTableTruncated": true,
"HandleSourceTableAltered": true
},
"PostProcessingRules": null
}
So, after digging through AWS documentation and contacting support, I learned that enum data types don't migrate using DMS. The task will fail no matter what.
If your database has enum columns, you need to find another solution, e.g. Barman running on an EC2 instance.
Related
Flutter - Jitsi Meet I want to add new options or at least modify an option
I'm trying to add a new option, such as inviting more people not via a link but with a call notification. Has anyone solved this issue? Please help.
you can use feature flags like below: Map<FeatureFlag, Object> featureFlags = { FeatureFlag.isMeetingNameEnabled: false, FeatureFlag.isServerUrlChangeEnabled: true, FeatureFlag.isChatEnabled: false, FeatureFlag.isAddPeopleEnabled: false, FeatureFlag.areSecurityOptionsEnabled: false, FeatureFlag.isCalendarEnabled: false, FeatureFlag.isCloseCaptionsEnabled: false, FeatureFlag.isFilmstripEnabled: false, FeatureFlag.isHelpButtonEnabled: false, FeatureFlag.isInviteEnabled: false, FeatureFlag.isLiveStreamingEnabled: false, FeatureFlag.isLobbyModeEnabled: false, FeatureFlag.isOverflowMenuEnabled: false, FeatureFlag.isReactionsEnabled: false, FeatureFlag.isRaiseHandEnabled: false, FeatureFlag.isRecordingEnabled: false, FeatureFlag.isReplaceParticipantEnabled: false, }; var options = JitsiMeetingOptions( isVideoMuted: true, roomNameOrUrl: name, userDisplayName: prefs.getString('username')!, serverUrl: 'https://**********', featureFlags: featureFlags, );
Can Protractor ignores the timeouts of 3rd party plugin in angular?
I'm using amchart to show my data analytics in the Angular app. If I run an E2E test on that page that contains the amchart plugin, it's not able to finish (script timeout) cause it using real-time updates for charts (dynamic)... This command 'getAllAngularTestabilities()' in console shows that has been pendingMacrotasks on page, so if the Protractor not working here, it's totally okay. [Testability] 0: Testability taskTrackingZone: TaskTrackingZoneSpec {name: "TaskTrackingZone", microTasks: Array(0), macroTasks: Array(3), eventTasks: Array(474), properties: {…}} _callbacks: [] _didWork: true _isZoneStable: true _ngZone: NgZone hasPendingMacrotasks: true hasPendingMicrotasks: false isStable: true lastRequestAnimationFrameId: -1 nativeRequestAnimationFrame: ƒ requestAnimationFrame() onError: EventEmitter_ {_isScalar: false, observers: Array(1), closed: false, isStopped: false, hasError: false, …} onMicrotaskEmpty: EventEmitter_ {_isScalar: false, observers: Array(1), closed: false, isStopped: false, hasError: false, …} onStable: EventEmitter_ {_isScalar: false, observers: Array(2), closed: false, isStopped: false, hasError: false, …} onUnstable: EventEmitter_ {_isScalar: false, observers: Array(1), closed: false, isStopped: false, hasError: false, …} shouldCoalesceEventChangeDetection: false I have 3 charts on that page, so I checked what NgZone says: Coming to a request in every second, and I can't turn off them. I'm trying to find the solution in amchart's documentation but I haven't found anything yet... ZONE pending tasks= (3) [ZoneTask, ZoneTask, ZoneTask] 0: ZoneTask callback: ƒ () cancelFn: undefined creationLocation: Error: Task 'macroTask' from 'setTimeout'. 
at TaskTrackingZoneSpec.push.FGvd.TaskTrackingZoneSpec.onScheduleTask (http://localhost:4200/vendor.js:54102:40) at ZoneDelegate.scheduleTask (http://localhost:4200/polyfills.js:9471:55) at Object.onScheduleTask (http://localhost:4200/polyfills.js:9365:69) at ZoneDelegate.scheduleTask (http://localhost:4200/polyfills.js:9471:55) at Zone.scheduleTask (http://localhost:4200/polyfills.js:9303:47) at Zone.scheduleMacroTask (http://localhost:4200/polyfills.js:9326:29) at scheduleMacroTaskWithCurrentZone (http://localhost:4200/polyfills.js:10227:29) at http://localhost:4200/polyfills.js:11679:34 at proto.<computed> (http://localhost:4200/polyfills.js:10542:52) at loop_1 (http://localhost:4200/vendor.js:23731:42) data: {isPeriodic: false, delay: 1000, args: Arguments(2), handleId: 1516} invoke: ƒ () runCount: 0 scheduleFn: ƒ scheduleTask(task) source: "setTimeout" type: "macroTask" _state: "notScheduled" _zone: Zone {_parent: Zone, _name: "angular", _properties: {…}, _zoneDelegate: ZoneDelegate} _zoneDelegates: null state: (...) zone: (...) __proto__: Object 1: ZoneTask {_zone: Zone, runCount: 0, _zoneDelegates: null, _state: "notScheduled", type: "macroTask", …} 2: ZoneTask {_zone: Zone, runCount: 0, _zoneDelegates: null, _state: "notScheduled", type: "macroTask", …} length: 3 __proto__: Array(0) UPDATE! I can avoid this problem with a tiny workaround. Need to create a function that using runOutsideAngular(), and if I create the chart inside the callback, no will be running macrotasks! constructor(#Inject(PLATFORM_ID) private platformId, private zone: NgZone) { } // Run the function only in the browser browserOnly(f: () => void): void { if (isPlatformBrowser(this.platformId)) { this.zone.runOutsideAngular(() => { f(); }); } } ngOnInit(): void { this.browserOnly(() => { this.chart = am4core.create('line-chart-placeholder', am4charts.XYChart); }); }
Yes, you can ignore it. You need to disable Protractor's main feature that waits for the page to be ready, like this: await browser.waitForAngularEnabled(false). For more info, read here.
How do I import this large JSON into a Firestore database, using Google Cloud Functions?
I have a large JSON file (around 110MB). I am trying to import it into my Firestore database to be used within my React application. How would I go about doing this? I have already tried using this code: const admin = require('firebase-admin'); const functions = require('firebase-functions'); const serviceAccount = require('./serviceaccount.json'); const datafile = './DIID.json'; const cors = require('cors')({ origin: true }); admin.initializeApp({ credential: admin.credential.cert(serviceAccount), databaseURL: "https://databaseurl.firebaseio.com" }); const runtimeOpts = { timeoutSeconds: 540, memory: '2GB' }; exports.setData = functions.runWith(runtimeOpts).https.onRequest((request, response) => { cors(request, response, () => { return admin.database().ref('Destiny').push(datafile) .then(() => { console.log("Done"); }) }) }); But my function always times out after the specified 540 seconds and there is no data written to the database. Here is an example of my JSON structure: "423789": { "displayProperties": { "description": "PARADISE LOST: A group of castaways seek to overthrow the divine order.", "name": "Mythos Hack 4.1", "icon": "/common/destiny2_content/icons/47849b754551cc0f4f03acd7554882eb.jpg", "hasIcon": true }, "collectibleHash": 4220193450, "backgroundColor": { "colorHash": 0, "red": 0, "green": 0, "blue": 0, "alpha": 0 }, "screenshot": "/common/destiny2_content/screenshots/423789.jpg", "itemTypeDisplayName": "Gauntlets", "uiItemDisplayStyle": "", "itemTypeAndTierDisplayName": "Rare Gauntlets", "displaySource": "", "action": { "verbName": "Dismantle", "verbDescription": "", "isPositive": false, "requiredCooldownSeconds": 0, "requiredItems": [], "progressionRewards": [], "actionTypeLabel": "shard", "rewardSheetHash": 0, "rewardItemHash": 0, "rewardSiteHash": 0, "requiredCooldownHash": 0, "deleteOnAction": true, "consumeEntireStack": false, "useOnAcquire": false } } This is repeated over and over again for lots of different items. 
I have tried using a real-time database but the file size exceeds the maximum quota for file sizes, and I have also tried various parsers to periodically write the file to the database but I couldn't seem to get that working either. Here are links to some things I have tried so far: https://bigcodenerd.org/import-export-data-firestore/ Import large data (json) into Firebase periodically Any help will be appreciated; if there is a better solution to my issue I am willing to change it up and approach it differently.
Sails-mongo automatic failover
I'm using sails-mongo in my nodejs application, and my config looks like this: mongoDb: { adapter: 'sails-mongo', url: 'mongodb://prod_user:prod_password#router-1-incloud:16888,router-2-incloud:16888/db_name', auto_reconnect: true, poolSize: parseInt(process.env.MONGO_POOL_LIMIT) || 10, w: parseInt(process.env.MONGO_WRITE_CONCERT_INT) || 'majority', reconnectInterval: 200, wtimeout: parseInt(process.env.MONGO_WRITE_TIMEOUT_MS) || 2000, retryMiliSeconds: 200, numberOfRetries: 3, readPreference: process.env.MONGO_READ_PREFERENCE || 'primaryPreferred', socketOptions: { noDelay: true, keepAlive: 0, connectTimeoutMS: parseInt(process.env.MONGO_CONNECT_TIMEOUT_MS) || 2000, socketTimeoutMS: parseInt(process.env.MONGO_SOCKET_TIMEOUT_MS) || 2000 } So when router-1-incloud:16888 goes down driver doesn't switch to router-2-incloud:16888 even when I restart my application, it always needs both routers to be available or it won't establish connection to mongo. Is there a config value I need to adjust so it automatically switches to any available host?
Generating a Road Network by Code
I am currently trying to generate a road. I wrote the following code based on the API and the procedure for generating a network in the process modelling library. MarkupSegmentLine rs = new MarkupSegmentLine(-40, 125, 0, 5000, 125, 0 ); Road entryRoad = new Road(this, SHAPE_DRAW_2D3D, true, 1, 1, 0, dodgerBlue, rs); presentation.add(entryRoad); When I run the code, I get a null pointer exception. The code compiles when the presentation call is added, but is that actually the error? Should I be using another function, such as draw()?
You are almost right... you need to just add the road into a network that defines how your roads are configured. RoadNetwork littleNetwork = new RoadNetwork(this, "littleNetwork", SHAPE_DRAW_2D3D, true, true, ROAD_RIGHT_HAND, 3.5, gray, white, ROAD_LINE_SINGLE_DASHED, white, ROAD_LINE_SINGLE, true); MarkupSegmentLine rs = new MarkupSegmentLine(-40, 125, 0, 5000, 125, 0 ); Road entryRoad = new Road(this, SHAPE_DRAW_2D3D, true, 1, 1, 0, dodgerBlue, rs); littleNetwork.addAll(entryRoad); presentation.add(entryRoad);