import_dmp
- creates a 1 GB tablespace ROA
- creates the users contafin_oracle and firma
- imports contafin_oracle.dmp
- applies postdbcreation.sql to contafin_oracle (synonyms)
- imports firmanoua.dmp

Usage:
- copy the files from import_dmp over the ones in creare_server_scripturi
- copy contafin_oracle.dmp and firmanoua.dmp into the creare_server_scripturi directory
- run roa.bat

contafin_oracle.dmp > 24.07.2014
firmanoua.dmp       > 24.07.2014

Oracle XE 21c

1. Add the XEPDB1 and ROA entries to
   D:\app\roa\product\21c\homes\OraDB21Home1\network\admin\TNSNAMES.ORA
   (both aliases point to the XEPDB1 service):

   XEPDB1 =
     (DESCRIPTION =
       (ADDRESS = (PROTOCOL = TCP)(HOST = AppSrv.set.local)(PORT = 1521))
       (CONNECT_DATA =
         (SERVER = DEDICATED)
         (SERVICE_NAME = XEPDB1)
       )
     )

   ROA =
     (DESCRIPTION =
       (ADDRESS = (PROTOCOL = TCP)(HOST = AppSrv.set.local)(PORT = 1521))
       (CONNECT_DATA =
         (SERVER = DEDICATED)
         (SERVICE_NAME = XEPDB1)
       )
     )

2. In sqlnet.ora:

   SQLNET.ALLOWED_LOGON_VERSION_SERVER = 11
   SQLNET.ALLOWED_LOGON_VERSION_CLIENT = 11

3. sqlplus sys/romfastsoft@XEPDB1 as sysdba

   alter profile DEFAULT limit password_life_time UNLIMITED;
   @D:\app\roa\product\21c\dbhomeXE\rdbms\admin\utlmail.sql
   @D:\app\roa\product\21c\dbhomeXE\rdbms\admin\prvtmail.plb

4. Log on as SYS@XEPDB1:

   CREATE OR REPLACE DIRECTORY DMPDIR AS 'C:\DMPDIR';
   GRANT ALL ON DIRECTORY DMPDIR TO PUBLIC;

5. ALTER USER SYS IDENTIFIED BY SR0mfasT2024;

6. Generate the AUTH_SERII INSERT statements (licence series for client 138):

   select 'INSERT INTO AUTH_SERII (ID_SERIE, ID_PROGRAM, SERIE, ID_UTIL) VALUES (SEQ_AUTH_SERII.NEXTVAL, ' ||
          ID_PROGRAM || ', ''' || SERIE || ''', -3);' as cmd
     from (select SP.serie, SP.id_program, SP.ID_CLIENT, SP.Nr_Util, SP.Data_val
             from vgen_seriiprog SP
             JOIN (SELECT ID_PROGRAM, MAX(DATAORA) AS DATAORA
                     FROM vgen_seriiclienti t
                    WHERE ID_CLIENT = 138
                    GROUP BY ID_PROGRAM) MP
               ON SP.ID_PROGRAM = MP.ID_PROGRAM AND SP.DATAORA = MP.DATAORA
            where SP.ID_CLIENT = 138
              AND SP.id_program in (49, 8, 2, 21, 1, 29, 5, 6, 7, 55, 159, 11, 0, 66));

7. Create the daily UPDATERTVAI job:

   begin
     sys.dbms_scheduler.create_job(job_name        => 'CONTAFIN_ORACLE.UPDATERTVAI_ZILNIC',
                                   job_type        => 'STORED_PROCEDURE',
                                   job_action      => 'PACK_ROARTVAI.UPDATERTVAI',
                                   start_date      => to_date('14-04-2015 04:30:00', 'dd-mm-yyyy hh24:mi:ss'),
                                   repeat_interval => 'Freq=Daily;Interval=1',
                                   end_date        => to_date(null),
                                   job_class       => 'DEFAULT_JOB_CLASS',
                                   enabled         => true,
                                   auto_drop       => false,
                                   comments        => '');
   end;
   /
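   A quick way to check and exercise the job created in step 7 (a sketch; it assumes the job was
   created as above and that the session can see the DBA views):

   -- Confirm the job exists and is scheduled.
   select job_name, state, enabled, next_run_date
     from dba_scheduler_jobs
    where owner = 'CONTAFIN_ORACLE'
      and job_name = 'UPDATERTVAI_ZILNIC';

   -- Optionally run it once on demand, outside its daily schedule.
   begin
     dbms_scheduler.run_job('CONTAFIN_ORACLE.UPDATERTVAI_ZILNIC');
   end;
   /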
8. -- log on as sys@roa
create or replace procedure NewSchema(tcSchema varchar2, tnLuna number, tnAn number) is
  lcCommand          varchar2(1000);
  lcSchemaSursa      VARCHAR2(100) := 'FIRMANOUA';
  lcSchemaDestinatie VARCHAR2(100) := tcSchema;
  lcDumpFile         VARCHAR2(100);
  lcDumpDir          VARCHAR2(100) := 'DMPDIR';
  -- lnCount number(10);
  lcSql              varchar2(1000);
  lnAn               number := tnAn;
  lnLuna             number := tnLuna;
  ldDataI            date;
  ldDataF            date;
  lnZileLucratoare   number(10) := 0;
  lnOreLucratoare    number(10) := 0;
  lnMaxAn            number(10);
  lcLocatie          varchar2(100) := 'CREARE FIRMA ' || tcSchema;
  h1           NUMBER;        -- Data Pump job handle
  ind          NUMBER;        -- Loop index
  percent_done NUMBER;        -- Percentage of job complete
  job_state    VARCHAR2(30);  -- To keep track of job state
  le           ku$_LogEntry;  -- For WIP and error messages
  js           ku$_JobStatus; -- The job status from get_status
  -- jd ku$_JobDesc;          -- The job description from get_status
  sts          ku$_Status;    -- The status object returned by get_status
begin
  ------------------------------------------------------------------------
  -- CREATE THE USER
  ------------------------------------------------------------------------
  /* SELECT COUNT(1) INTO lnCount FROM dba_users WHERE username = UPPER(tcSchema);
  IF lnCount <> 0 THEN
    EXECUTE IMMEDIATE ('DROP USER ' || tcSchema || ' CASCADE');
  END IF; */
  lcCommand := 'create user ' || tcSchema || ' identified by ' ||
               CASE WHEN upper(tcSchema) like 'TEST%' THEN '"123"' ELSE 'ROMFASTSOFT' END ||
               ' default tablespace ROA temporary tablespace TEMP profile DEFAULT';
  pinfo(lcCommand, lcLocatie);
  execute immediate lcCommand;
  ------------------------------------------------------------------------
  -- GRANT PRIVILEGES
  ------------------------------------------------------------------------
  lcCommand := 'grant connect to ' || tcSchema;
  execute immediate lcCommand;
  lcCommand := 'grant resource to ' || tcSchema;
  execute immediate lcCommand;
  lcCommand := 'grant create materialized view to ' || tcSchema;
  execute immediate lcCommand;
  lcCommand := 'grant create procedure to ' || tcSchema;
  execute immediate lcCommand;
  lcCommand := 'grant create sequence to ' || tcSchema;
  execute immediate lcCommand;
  lcCommand := 'grant create table to ' || tcSchema;
  execute immediate lcCommand;
  lcCommand := 'grant create trigger to ' || tcSchema;
  execute immediate lcCommand;
  lcCommand := 'grant create type to ' || tcSchema;
  execute immediate lcCommand;
  lcCommand := 'grant create view to ' || tcSchema;
  execute immediate lcCommand;
  lcCommand := 'grant debug connect session to ' || tcSchema;
  execute immediate lcCommand;
  lcCommand := 'grant select any table to ' || tcSchema;
  execute immediate lcCommand;
  lcCommand := 'grant unlimited tablespace to ' || tcSchema;
  execute immediate lcCommand;
  ------------------------------------------------------------------------
  -- IMPORT DUMP
  ------------------------------------------------------------------------
  -- Create a (user-named) Data Pump job to do a "full" import (everything
  -- in the dump file without filtering).
  lcDumpFile := lcSchemaSursa || '.dmp';
  -- h1 := DBMS_DATAPUMP.OPEN('IMPORT', 'SCHEMA', NULL, lcSchemaDestinatie);
  h1 := DBMS_DATAPUMP.OPEN('IMPORT', 'SCHEMA', NULL, lcSchemaDestinatie);
  -- Specify the dump file for the job (using the handle just returned) and the
  -- directory object, which must already be defined and accessible to the user
  -- running this procedure (the FIRMANOUA dump copied into DMPDIR in step 4).
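  -- The filetype values passed to ADD_FILE below correspond to the DBMS_DATAPUMP
  -- constants KU$_FILE_TYPE_LOG_FILE (3) for the import log file and
  -- KU$_FILE_TYPE_DUMP_FILE (1) for the dump file itself.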
  -- DBMS_DATAPUMP.ADD_FILE(h1, lcDumpFile, lcDumpDir);
  BEGIN
    PINFO('DBMS_DATAPUMP.ADD_FILE LOG', lcLocatie);
    DBMS_DATAPUMP.ADD_FILE(handle    => h1,
                           filename  => lcSchemaDestinatie || '.log',
                           directory => 'DMPDIR',
                           filetype  => 3);
    PINFO('DBMS_DATAPUMP.ADD_FILE DMP', lcLocatie);
    DBMS_DATAPUMP.ADD_FILE(handle    => h1,
                           filename  => lcDumpFile,
                           directory => lcDumpDir,
                           filetype  => 1);
    -- A metadata remap maps all schema objects from the source schema (FIRMANOUA)
    -- to the destination schema.
    PINFO('DBMS_DATAPUMP.METADATA_REMAP', lcLocatie);
    DBMS_DATAPUMP.METADATA_REMAP(h1, 'REMAP_SCHEMA', lcSchemaSursa, lcSchemaDestinatie);
    -- If a table already exists in the destination schema, skip it (leave
    -- the preexisting table alone). This is the default, but it does not hurt
    -- to specify it explicitly.
    DBMS_DATAPUMP.SET_PARAMETER(h1, 'TABLE_EXISTS_ACTION', 'SKIP');
    -- Start the job. An exception is returned if something is not set up properly.
    PINFO('DBMS_DATAPUMP.START_JOB', lcLocatie);
    DBMS_DATAPUMP.START_JOB(h1);
    ------------------------------------------------------------------------
    -- The import job should now be running. In the following loop, the job is
    -- monitored until it completes. In the meantime, progress information is
    -- displayed.
    percent_done := 0;
    job_state := 'UNDEFINED';
    while (job_state != 'COMPLETED') and (job_state != 'STOPPED') loop
      dbms_datapump.get_status(h1,
                               dbms_datapump.ku$_status_job_error +
                               dbms_datapump.ku$_status_job_status +
                               dbms_datapump.ku$_status_wip,
                               -1,
                               job_state,
                               sts);
      js := sts.job_status;
      -- If the percentage done changed, display the new value.
      if js.percent_done != percent_done then
        pinfo('*** Job percent done = ' || to_char(js.percent_done), lcLocatie);
        percent_done := js.percent_done;
      end if;
      -- If any work-in-progress (WIP) or error messages were received for the job,
      -- display them.
      if (bitand(sts.mask, dbms_datapump.ku$_status_wip) != 0) then
        le := sts.wip;
      else
        if (bitand(sts.mask, dbms_datapump.ku$_status_job_error) != 0) then
          le := sts.error;
        else
          le := null;
        end if;
      end if;
      if le is not null then
        ind := le.FIRST;
        while ind is not null loop
          -- dbms_output.put_line(le(ind).LogText);
          pinfo(le(ind).LogText, lcLocatie);
          ind := le.NEXT(ind);
        end loop;
      end if;
    end loop;
    -- Indicate that the job finished and gracefully detach from it.
    -- dbms_output.put_line('Job has completed');
    pinfo('Job has completed', lcLocatie);
    -- dbms_output.put_line('Final job state = ' || job_state);
    pinfo('Final job state = ' || job_state, 'CREARE FIRMA ' || lcSchemaDestinatie);
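    -- Note: DBMS_DATAPUMP.WAIT_FOR_JOB(h1, job_state) is a simpler alternative to the
    -- polling loop above when per-message progress logging through pinfo is not needed.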
    ------------------------------------------------------------------------
    -- POST IMPORT
    -- 1. Update CALENDAR, SAL_CALENDAR, COTE_TVA
    -- CALENDAR
    lcSql := 'UPDATE ' || lcSchemaDestinatie || '.CALENDAR SET ANUL = :1, LUNA = :2, AN = TO_CHAR(:3), NL = LPAD(:4, 2, ''0'')';
    PINFO(lcSql, lcLocatie);
    EXECUTE IMMEDIATE lcSql USING lnAn, lnLuna, lnAn, lnLuna;
    -- SAL_CALENDAR
    ldDataI := TO_DATE(lpad(lnAn, 4, '0') || lpad(lnLuna, 2, '0'), 'YYYYMM');
    ldDataF := ADD_MONTHS(ldDataI, 1) - 1;
    lcSql := 'select ' || lcSchemaDestinatie || '.pack_personal.calculeaza_nr_zile(:1, :2) FROM DUAL';
    EXECUTE IMMEDIATE lcSql INTO lnZileLucratoare USING ldDataI, ldDataF;
    lnOreLucratoare := lnZileLucratoare * 8;
    lcSql := 'UPDATE ' || lcSchemaDestinatie || '.SAL_CALENDAR SET AN = :1, LUNA = :2, ZILELUC = :3, ORESTAS = :4, ORELUC = :5';
    PINFO(lcSql, lcLocatie);
    EXECUTE IMMEDIATE lcSql USING lnAn, lnLuna, lnZileLucratoare, lnOreLucratoare, lnOreLucratoare;
    -- SAL_IMPOZITAR
    lcSql := 'UPDATE ' || lcSchemaDestinatie || '.SAL_IMPOZITAR SET AN = :1, LUNA = :2';
    PINFO(lcSql, lcLocatie);
    EXECUTE IMMEDIATE lcSql USING lnAn, lnLuna;
    -- PLCONT
    lcSql := 'SELECT max(an) as plan from ' || lcSchemaDestinatie || '.PLCONT';
    PINFO(lcSql, lcLocatie);
    EXECUTE IMMEDIATE lcSql INTO lnMaxAn;
    if lnMaxAn <> lnAn then
      lcSql := 'UPDATE ' || lcSchemaDestinatie || '.PLCONT SET AN = :1 WHERE AN = :2';
      PINFO(lcSql, lcLocatie);
      EXECUTE IMMEDIATE lcSql USING lnAn, lnMaxAn;
    end if;
    -- COTE_TVA
    lcSql := 'SELECT max(an*12+luna) from ' || lcSchemaDestinatie || '.cote_tva';
    PINFO(lcSql, lcLocatie);
    EXECUTE IMMEDIATE lcSql INTO lnMaxAn;
    lcSql := 'UPDATE ' || lcSchemaDestinatie || '.COTE_TVA SET AN = :1, LUNA = :2 WHERE AN*12+LUNA = :3';
    PINFO(lcSql, lcLocatie);
    EXECUTE IMMEDIATE lcSql USING lnAn, lnLuna, lnMaxAn;
    ------------------------------------------------------------------------
    -- 2. Bring the schema up to date
    PINFO('Actualizare schema la zi...', lcLocatie);
    CONTAFIN_ORACLE.PACK_UPDATE.UpdateROA(tnUpdateApp       => 0,
                                          tnUpdateScripturi => 0,
                                          tnUpdateDatabase  => 1,
                                          tcSchemaList      => tcSchema);
  EXCEPTION
    WHEN OTHERS THEN
      PINFO(SQLERRM, lcLocatie);
      raise_application_error(-20000, SQLERRM);
  END;
  DBMS_DATAPUMP.DETACH(h1);
end NewSchema;
/

create or replace procedure UpdateSQLPLUS(tcSqlPlusPath in varchar2, tcScriptPath in varchar2) as
  lcJobName varchar2(500);
begin
  lcJobName := 'updateschema_' || to_char(sysdate, 'YYYYMMDDHH24MISS');
  dbms_scheduler.create_job(lcJobName,
                            job_action          => tcSqlPlusPath,
                            number_of_arguments => 2,
                            job_type            => 'executable',
                            enabled             => false);
  dbms_scheduler.set_job_argument_value(lcJobName, 1, '/nolog');
  dbms_scheduler.set_job_argument_value(lcJobName, 2, '@' || tcScriptPath);
  dbms_scheduler.enable(lcJobName);
end;
/

GRANT EXECUTE ON UpdateSQLPLUS TO CONTAFIN_ORACLE;

create or replace procedure ExecuteScriptOS(tcPowerShellPath in varchar2, tcScriptPath in varchar2) as
  lcJobName varchar2(500);
begin
  lcJobName := 'exec_ps_' || to_char(sysdate, 'YYYYMMDDHH24MISS');
  dbms_scheduler.create_job(lcJobName,
                            job_action          => tcPowerShellPath,
                            number_of_arguments => 1,
                            job_type            => 'executable',
                            enabled             => false);
  dbms_scheduler.set_job_argument_value(lcJobName, 1, tcScriptPath);
  dbms_scheduler.enable(lcJobName);
end ExecuteScriptOS;
/

GRANT EXECUTE ON ExecuteScriptOS TO CONTAFIN_ORACLE;
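A minimal sketch of calling NewSchema from SQL*Plus as SYS@ROA; the schema name FIRMA_DEMO and
the 7/2024 month and year are placeholder values, not part of the scripts above:

-- Create and populate the hypothetical schema FIRMA_DEMO, initialised for month 7 of 2024.
-- Assumes FIRMANOUA.dmp is already present in the DMPDIR directory object (step 4).
begin
  NewSchema(tcSchema => 'FIRMA_DEMO', tnLuna => 7, tnAn => 2024);
end;
/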
10. Configure the fast recovery area and the archive log destination
    (see https://dbalifeeasy.com/tag/alter-system-set-log_archive_dest_1locationuse_db_recovery_file_dest/):

    show parameter db_recovery_file
    select * from V$RECOVERY_FILE_DEST;
    alter system set db_recovery_file_dest_size=30g scope=both;
    alter system set db_recovery_file_dest='D:\app\roa\product\21c\fast_recovery_area' scope=both;
    show parameter db_recovery_file
    alter system set log_archive_dest_1='LOCATION=USE_DB_RECOVERY_FILE_DEST' scope=both;
    archive log list;
    select * from v$flash_recovery_area_usage;
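    To keep an eye on how full the recovery area gets after this change, a sketch
    (v$recovery_area_usage is the current name of the v$flash_recovery_area_usage view queried above):

    select file_type, percent_space_used, percent_space_reclaimable, number_of_files
      from v$recovery_area_usage
     order by percent_space_used desc;

    Archived logs must be backed up or purged regularly (for example with RMAN's DELETE ARCHIVELOG
    command); otherwise the 30 GB db_recovery_file_dest_size will eventually fill and the database
    will hang on archiving.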