Goldengate Replication – Oracle To Bigdata

GOLDENGATE

Oracle to Hdfs

############# Project Information ###############

#target
pbigdata001 10.34.81.67 bigdata 
/bigdata_disk/gg/

##########################################################

GGSCI (knebdp01.ceofg.local) 4> view param EXTBD01

EXTRACT EXTBD01
--REGISTER EXTRACT EXTBD01 DATABASE
--ADD EXTRACT EXTBD01,INTEGRATED TRANLOG, BEGIN NOW
--ADD EXTTRAIL ./dirdat/bigdata01/kn EXTRACT EXTBD01, MEGABYTES 1024
CACHEMGR CACHESIZE 2048MB, CACHEDIRECTORY ./dirtmp, CACHEDIRECTORY ./dirtmp2
--TRANLOGOPTIONS INTEGRATEDPARAMS (max_sga_size 200, parallelism 4)
--TRANLOGOPTIONS DBLOGREADER
--TRANLOGOPTIONS ASMUSER sys@ASM,ASMPASSWORD sys1234
SETENV (ORACLE_SID="DBCARX")
SETENV (NLS_LANG = "AMERICAN_AMERICA.AL32UTF8")
SETENV (ORACLE_HOME="/u01/app/oracle/product/11.2.0/dbhome_1")
userid GGS_ADMIN@DBCARX, password "123456"
--ABORTONFATALERROR
-- due to existence of unused columns
--DBOPTIONS ALLOWUNUSEDCOLUMN
-- to reset statistics for every send report command
--STATOPTIONS, REPORTFETCH, RESETREPORTSTATS
-- Record count every X minutes
REPORTCOUNT EVERY 5 MINUTES, rate
REPORT AT 00:00
REPORTROLLOVER ON SUNDAY
--WARNLONGTRANS 2H, CHECKINTERVAL 1H -- duplicate: WARNLONGTRANS is set again (4H/30M) further down; keep only one occurrence
-- Bounded Recovery
--BR BRINTERVAL 4H
--BR BROFF
--- This parameter reports DDL being replicated.
--- This is very useful for troubleshooting.
--DDLOPTIONS ADDTRANDATA, REPORT
--- Capture DDL changes so that the target replicat can update metadata
--DDLOPTIONS GETREPLICATES
--ddlerror _SKIPDDL 7972746
--GETTRUNCATES
--GETUPDATEBEFORES
--GETBEFORECOLS
--GETUPDATEAFTERS
-- Just in case we can't process a record we'll dump info here
DISCARDFILE ./dirrpt/EXTBD01.dsc, APPEND, MEGABYTES 100
-- This is the Trail to where we output
EXTTRAIL ./dirdat/bigdata01/kn
-- Long Running Trx Infos
WARNLONGTRANS 4H, CHECKINTERVAL 30M
-- This is the Heartbeat table
--include ./dirprm/HB_Extract.inc
TABLE ARBOR.CDR_INFO;
TABLE ARBOR.CDR_INFO_DUC;

############# Source , pump a section ###############
#dbcarx

EXTRACT PBDATA
--ADD EXTRACT PBDATA exttrailsource ./dirdat/bigdata01/kn
--ADD RMTTRAIL /bigdata_disk/gg/dirdat/dbcarx_new/k1 EXTRACT PBDATA, MEGABYTES 600
-- Statistics and Report information
STATOPTIONS, RESETREPORTSTATS, REPORTFETCH
REPORTCOUNT EVERY 5 MINUTES, RATE
REPORT AT 00:00
REPORTROLLOVER ON SUNDAY
CACHEMGR CACHESIZE 2048MB, CACHEDIRECTORY ./dirtmp
-- Discard File Information
DISCARDFILE ./dirrpt/PBDATA.dsc, APPEND, MEGABYTES 100
userid GGS_ADMIN@dbcarx, password "123456"
-- This is the Trail to where we output
RMTHOST 10.34.81.67, MGRPORT 7809, TCPBUFSIZE 10000000, TCPFLUSHBYTES 10000000, COMPRESS
RMTTRAIL /bigdata_disk/gg/dirdat/dbcarx_new/k1
-- Heartbeat table
--nopassthru
--include ./dirprm/HB_pmp.inc
--passthru
FLUSHSECS 10
TABLE ARBOR.CDR_INFO;
TABLE ARBOR.CDR_INFO_DUC;

############# Source , pump b section ###############

#bigdata
GGSCI (pbigdata001) 1> view param ./GLOBALS

-->

#bigdata
edit param ./GLOBALS
ALLOWOUTPUTDIR /bigdata_disk/gg/dirdat/dbcarx_new
#dbcarx
dblogin userid GGS_ADMIN@dbcarx, password "123456"
ADD EXTRACT PBDATA exttrailsource ./dirdat/bigdata01/kn
ADD RMTTRAIL /bigdata_disk/gg/dirdat/dbcarx_new/k1 EXTRACT PBDATA, MEGABYTES 600
info PBDATA
alter PBDATA extseqno 1559 extrba 0
start PBDATA
info all
#bigdata
[fgencali@pbigdata001 GoldenGate12]$ cd /bigdata_disk/gg/dirdat/dbcarx_new/
[fgencali@pbigdata001 dbcarx_new]$ ls -lrt
total 160608
drwxr-xr-x 2 fgencali ugroup_bigdata_prod 4096 Jun 25 12:22 k1
-rw-r----- 1 fgencali ugroup_bigdata_prod 163807461 Jun 25 15:17 k1000000000
############# Defgen Parameter File #############
#Source gate (oracle)
#gate home
[ggate@knebdp01 dirprm]$ pwd
/ggate2/ggate12c/dirprm
cat defgen.prm
#DEFSFILE /ggate/GoldenGate12_2/dirdef/iccb.def, PURGE, FORMAT RELEASE 11.2
#userid GGS_ADMIN@ICCBCCB, password AACAAAAAAAAAAAIAXETGCCSCYFRAAAOG, encryptkey default
DEFSFILE /ggate2/ggate12c/dirdef/dbcarx.def, PURGE
userid GGS_ADMIN@dbcarx, password "123456"

TABLE ARBOR.CDR_INFO;
TABLE ARBOR.CDR_INFO_DUC;
############# Defgen Send #############
#Source gate (oracle)
#gate home
[ggate@knebdp01 ggate12c]$ pwd
cd /ggate2/ggate12c
./defgen paramfile ./dirprm/defgen.prm

[ggate@knebdp01 ggate12c]$ ls -lrt /ggate2/ggate12c/dirdef/dbcarx.def
-rw-r----- 1 ggate oinstall 14392 Jun 25 20:50 /ggate2/ggate12c/dirdef/dbcarx.def
------------
#Source gate (oracle)
#gate home
#download
cp /ggate2/ggate12c/dirdef/dbcarx.def /tmp/
chmod 777 /tmp/dbcarx.def
#[ggate@knebdp01 ggate12c]$ scp /tmp/dbcarx.def [email protected]:/tmp/

cd /tmp/
lcd C:\Users\fatih.gencali_ca\Desktop\fga2\temp
get dbcarx.def
#Target gate (bigdata,evam)
#gate home
#cd dirdef
#upload
cd /tmp/
lcd C:\Users\fatih.gencali_ca\Desktop\fga2\temp
put dbcarx.def

[fgencali@pbigdata001 ~]$ alias gate
alias gate='/data/disk02/ggate/GoldenGate12/ggsci'

cd /data/disk02/ggate/GoldenGate12/dirdef
[fgencali@pbigdata001 dirdef]$ cp /tmp/dbcarx.def .

############# Target , replicat ###############

edit param RHDFKN01

REPLICAT RHDFKN01
TARGETDB LIBFILE libggjava.so SET property=dirprm/knn01hdfs.props
SOURCEDEFS ./dirdef/dbcarx.def
MAP ARBOR.CDR_INFO,TARGET ARBOR.CDR_INFO;
MAP ARBOR.CDR_INFO_DUC,TARGET ARBOR.CDR_INFO_DUC;
--add replicat RHDFKN01, exttrail /bigdata_disk/gg/dirdat/dbcarx_new/k1

add replicat RHDFKN01, exttrail ./dirdat/dbcarx_new/k1
start RHDFKN01
info RHDFKN01
status RHDFKN01
#It will be processed
GGSCI (pbigdata001) 2> info RHDFKN01
REPLICAT RHDFKN01 Last Started 2019-06-26 16:33 Status RUNNING
Checkpoint Lag 11:42:29 (updated 00:00:00 ago)
Process ID 28415
Log Read Checkpoint File /bigdata_disk/gg/dirdat/dbcarx_new/k1000000010
2019-06-26 05:03:56.001246 RBA 190731966
.
#It was processed
[fgencali@pbigdata001 dbcarx_new]$ hdfs dfs -ls /data/domain/billing
Found 2 items
drwxr-xr-x - fgencali hive 0 2019-06-26 13:33 /data/domain/billing/arbor.cdr_info
drwxr-xr-x - fgencali hive 0 2019-06-26 13:33 /data/domain/billing/arbor.cdr_info_duc
#Info
#/data/domain/billing bigdata hdfs section
#/bigdata_disk/gg/dirdat/dbcarx_new/k1 oracle goldengate trails

edit param dirprm/knn01hdfs.props
*************************************
gg.handlerlist=hdfs
gg.handler.hdfs.type=hdfs
gg.handler.hdfs.includeTokens=false
gg.handler.hdfs.maxFileSize=2g
gg.handler.hdfs.rootFilePath=/data/domain/billing/dbcarx
# duplicate key: fileRollInterval is set again (15m) later in this file and the later value wins
#gg.handler.hdfs.fileRollInterval=0
gg.handler.hdfs.inactivityRollInterval=0
gg.handler.hdfs.partitionByTable=true
gg.handler.hdfs.rollOnMetadataChange=true
gg.handler.hdfs.authType=kerberos
gg.handler.hdfs.kerberosKeytabFile=/home/fgencali/fgencali.keytab
[email protected]
gg.handler.hdfs.fileRollInterval=15m
gg.handler.hdfs.format=delimitedtext
gg.handler.hdfs.format.includeColumnNames=true
gg.handler.hdfs.format.pkUpdateHandling=update
gg.handler.hdfs.mode=tx
goldengate.userexit=utc
goldengate.userexit.writers=javawriter
javawriter.stats.display=TRUE
javawriter.stats.full=TRUE
gg.log=log4j
gg.log.level=INFO
gg.report.time=30sec
javawriter.bootoptions=-Xmx8192m -Xms512m -XX:+UnlockExperimentalVMOptions -XX:+UseG1GC -XX:MaxGCPauseMillis=50 -XX:+ParallelRefProcEnabled -XX:-ResizePLAB -XX:ParallelGCThreads=20 -XX:G1NewSizePercent=5 -Djava.class.path=ggjava/ggjava.jar
gg.classpath=.:/opt/cloudera/parcels/CDH/lib/hadoop/*:/opt/cloudera/parcels/CDH/lib/hadoop-hdfs/lib/*:/opt/cloudera/parcels/CDH/lib/hadoop/client/*:/opt/cloudera/parcels/CDH/lib/hadoop/*:/etc/hadoop/conf/*:/opt/cloudera/parcels/CDH/lib/hadoop/lib/*:/etc/hadoop/conf/*:/opt/cloudera/parcels/CDH-5.13.2-1.cdh5.13.2.p0.3/lib/hadoop/bin/../conf:/opt/cloudera/java/jdk1.8.0_131/lib/tools.jar:/opt/cloudera/parcels/CDH-5.13.2-1.cdh5.13.2.p0.3/lib/hadoop/bin/..:/opt/cloudera/parcels/CDH-5.13.2-1.cdh5.13.2.p0.3/lib/hadoop/bin/../lib/*:/etc/hadoop/conf:/opt/cloudera/parcels/CDH-5.13.2-1.cdh5.13.2.p0.3/lib/hadoop/libexec/../../hadoop/lib/*:/opt/cloudera/parcels/CDH-5.13.2-1.cdh5.13.2.p0.3/lib/hadoop/libexec/../../hadoop/.//*:/opt/cloudera/parcels/CDH-5.13.2-1.cdh5.13.2.p0.3/lib/hadoop/libexec/../../hadoop-hdfs/./:/opt/cloudera/parcels/CDH-5.13.2-1.cdh5.13.2.p0.3/lib/hadoop/libexec/../../hadoop-hdfs/lib/*:/opt/cloudera/parcels/CDH-5.13.2-1.cdh5.13.2.p0.3/lib/hadoop/libexec/../../hadoop-hdfs/.//*:/opt/cloudera/parcels/CDH-5.13.2-1.cdh5.13.2.p0.3/lib/hadoop/libexec/../../hadoop-yarn/lib/*:/opt/cloudera/parcels/CDH-5.13.2-1.cdh5.13.2.p0.3/lib/hadoop/libexec/../../hadoop-yarn/.//*:/opt/cloudera/parcels/CDH/lib/hadoop-mapreduce/lib/*:/opt/cloudera/parcels/CDH/lib/hadoop-mapreduce/.//*:/opt/cloudera/parcels/CDH-5.13.2-1.cdh5.13.2.p0.3/bin/../lib/hadoop/lib/*:/opt/cloudera/parcels/CDH-5.13.2-1.cdh5.13.2.p0.3/bin/../lib/zookeeper/*:/opt/cloudera/parcels/CDH-5.13.2-1.cdh5.13.2.p0.3/bin/../lib/zookeeper/lib/*:/etc/hadoop/conf:/opt/cloudera/parcels/CDH-5.13.2-1.cdh5.13.2.p0.3/lib/hadoop/libexec/../../hadoop/lib/*:/opt/cloudera/parcels/CDH-5.13.2-1.cdh5.13.2.p0.3/lib/hadoop/libexec/../../hadoop/.//*:/opt/cloudera/parcels/CDH-5.13.2-1.cdh5.13.2.p0.3/lib/hadoop/libexec/../../hadoop-hdfs/./:/opt/cloudera/parcels/CDH-5.13.2-1.cdh5.13.2.p0.3/lib/hadoop/libexec/../../hadoop-hdfs/lib/*:/opt/cloudera/parcels/CDH-5.13.2-1.cdh5.13.2.p0.3/lib/hadoop/libexec/../../hadoop-hdfs/.//*:/opt/cloudera/parcels/C
DH-5.13.2-1.cdh5.13.2.p0.3/lib/hadoop/libexec/../../hadoop-yarn/lib/*:/opt/cloudera/parcels/CDH-5.13.2-1.cdh5.13.2.p0.3/lib/hadoop/libexec/../../hadoop-yarn/.//*:/opt/cloudera/parcels/CDH/lib/hadoop-mapreduce/lib/*:/opt/cloudera/parcels/CDH/lib/hadoop-mapreduce/.//*:ggjava/ggjava.jar:/data/disk02/ggate/GoldenGate12/Cloudera_HiveJDBC41_2.5.16.1044/*

-------

About Fatih Gençali

- I have worked as an Oracle, NoSQL & Big Data DBA for more than 9 years. - I have worked in 24x7 production and test environments. - I hold a 12c OCP certificate. - I have a Europass diploma supplement. - Saving operations - I have supported NoSQL databases (MongoDB, Cassandra, Couchbase). - I have supported Ambari and MapR Hadoop distributions. - I hold a Couchbase certificate. - I have supported databases in the telecommunications, banking, insurance, financial, retail, manufacturing, marketing and e-invoicing sectors. - Providing alignment between prod, prp, stb and dev environments - Managing and performance-tuning application and database machines (Linux) - Performance tuning and SQL tuning - Consolidations, migrations (expdp, XTTS, switchover, etc.), installation, patching, upgrades, Data Guard, shell script writing, backup and restore, Exadata management, performance management, security management, GoldenGate operations - Resolving performance and security problems on databases and Linux machines - I have managed Oracle 10g/11g/12c databases (dev/test/prp/snap/prod/stby) on Linux/HP-UX/AIX/Solaris - PL/SQL operations and shell script support (for alignments and other tasks) - Providing highly available IT (software and hardware) systems, especially database systems - Managing and monitoring the availability and operation of all systems - GoldenGate operations (Oracle to Oracle, Oracle to Big Data (HDFS, Kafka)) - Exadata operations (cell management, upgrades, switchover) - My work processes follow ITIL. - Preparing automation for everything to reduce human resource requirements and routine work. [email protected] https://www.linkedin.com/in/fatih-gençali-22131131/

Leave a Reply

Your email address will not be published. Required fields are marked *