INSERT INTO `obpm2`.`flink_jobs` (
  `id`, `name`, `job_type`, `parameter_json`, `description`, `is_online`,
  `last_updated_time`, `created_time`, `flink_job_id`, `flink_job_started_time`,
  `flink_job_started_response`, `title`, `is_deleted`
) VALUES (
  67,
  'Capture obpm.tenant_users-tenant-employees-baibaodunflow-tlkcompanyinformation and attendance-site info to Kafka',
  0,
  '{\n  \"param1\": \"{\\\"job.name\\\":\\\"Capture obpm.tenant_users-tenant-employees-baibaodunflow-tlkcompanyinformation and attendance-site info to Kafka\\\",\\\"checkpoint.mode\\\":\\\"file\\\",\\\"checkpoint.disk.location\\\":\\\"/tmp/cdc-obpm2-baibaodunflow\\\",\\\"checkpoint.hdfs.config\\\":\\\"core-site-default.xml\\\",\\\"kafka.bootstrap.server\\\":\\\"117.78.39.204:9092\\\",\\\"kafka.cdc.topic.prefix\\\":\\\"[dbName].binlog-cdc.topic.[tableName]\\\",\\\"kafka.consumer.group.id\\\":\\\"binlog-cdc-flink-platform-events\\\"}\",\n  \"param2\": \"{\\\"source.name\\\":\\\"mysql-cdc-flink-source\\\",\\\"source.database.host\\\":\\\"220.243.148.23\\\",\\\"source.database.port\\\":\\\"2111\\\",\\\"source.database.username\\\":\\\"bcx\\\",\\\"source.database.password\\\":\\\"Wstestv5qy#2022\\\",\\\"source.database.serverTimeZone\\\":\\\"GMT+8\\\",\\\"source.database.serverId\\\":\\\"6001-6200\\\",\\\"source.database.connectTimeout\\\":\\\"60000\\\",\\\"source.database.db.list\\\":\\\"obpm2;baibaodunflow;dispatch;organizationauth;\\\",\\\"source.database.table.list\\\":\\\"obpm2.tenant_users;obpm2.tenant_user_credentials;obpm2.tenant_employees;obpm2.tenant_organizations;obpm2.tenant_departments;baibaodunflow.TLK_COMPANYINFORMATION;baibaodunflow.TLK_ATTENDANCE_SITE_BASE_INFO;baibaodunflow.tlk_attendance_site_person_info;organizationauth.tlk_DepartmentLevel;baibaodunflow.tlk_SetSupervise;dispatch.tenant_employee_attendance_records;dispatch.tlk_device_binded_log;dispatch.tencent_close_room_events;dispatch.rd_company_summary;dispatch.rd_employee_summary;dispatch.rd_security_station_summary;dispatch.tlk_org_purse_transaction;dispatch.tlk_org_purse;\\\"}\"\n}',
  'Flink captures binlog changes from obpm2 + baibaodunflow and pushes them to Kafka',
  1,
  '2023-04-09 16:20:05',
  '2023-03-05 20:21:59',
  NULL,
  NULL,
  NULL,
  'xxxx',
  0
);
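The parameter_json column splits the job configuration in two: param1 carries checkpointing and Kafka sink settings, param2 carries the MySQL binlog source settings (host, credentials, server-id range, database and table lists). Below is a minimal sketch of how such a row could be turned into a running Flink job, assuming the platform builds on the Ververica flink-connector-mysql-cdc MySqlSource and Flink's KafkaSink; the class name CdcToKafkaJobSketch, the shortened table list, the single hard-coded topic, and the 60-second checkpoint interval are illustrative assumptions, not values read from the row.

import com.ververica.cdc.connectors.mysql.source.MySqlSource;
import com.ververica.cdc.debezium.JsonDebeziumDeserializationSchema;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.connector.base.DeliveryGuarantee;
import org.apache.flink.connector.kafka.sink.KafkaRecordSerializationSchema;
import org.apache.flink.connector.kafka.sink.KafkaSink;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

public class CdcToKafkaJobSketch {
    public static void main(String[] args) throws Exception {
        // Source settings mirror param2; in the real platform they would be
        // parsed from parameter_json rather than hard-coded.
        MySqlSource<String> source = MySqlSource.<String>builder()
                .hostname("220.243.148.23")
                .port(2111)
                .username("bcx")
                .password("Wstestv5qy#2022")
                .serverTimeZone("GMT+8")
                .serverId("6001-6200")
                .databaseList("obpm2", "baibaodunflow", "dispatch", "organizationauth")
                .tableList("obpm2.tenant_users", "baibaodunflow.TLK_COMPANYINFORMATION")  // abbreviated
                .deserializer(new JsonDebeziumDeserializationSchema())  // emit change events as JSON strings
                .build();

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // param1: checkpoint.mode=file, checkpoint.disk.location=/tmp/cdc-obpm2-baibaodunflow
        env.enableCheckpointing(60_000);  // interval is an assumption, not part of the row
        env.getCheckpointConfig().setCheckpointStorage("file:///tmp/cdc-obpm2-baibaodunflow");

        // param1's kafka.cdc.topic.prefix template "[dbName].binlog-cdc.topic.[tableName]"
        // implies one topic per table; a single topic is used here for brevity.
        KafkaSink<String> sink = KafkaSink.<String>builder()
                .setBootstrapServers("117.78.39.204:9092")
                .setRecordSerializer(KafkaRecordSerializationSchema.builder()
                        .setTopic("obpm2.binlog-cdc.topic.tenant_users")
                        .setValueSerializationSchema(new SimpleStringSchema())
                        .build())
                .setDeliveryGuarantee(DeliveryGuarantee.AT_LEAST_ONCE)
                .build();

        env.fromSource(source, WatermarkStrategy.noWatermarks(), "mysql-cdc-flink-source")
                .sinkTo(sink)
                .name("binlog-cdc-to-kafka");

        env.execute("Capture obpm2/baibaodunflow binlog to Kafka");
    }
}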