-- One row per run of dbo.Import_PZ_Execute.
-- BUG FIX: the procedure's logging INSERT supplies five values
-- (StartDateTime, EndDateTime, ReturnValue, ReturnMessage, Status) but the
-- original table had no ReturnValue column, so the INSERT failed at run time.
-- [ReturnValue] is added in the matching position.
CREATE TABLE [dbo].[Import_PZ_Job_Log](
[RowNumber] int IDENTITY(1,1),        -- surrogate row id
[StartDateTime] [datetime] NULL,      -- when the import run started
[EndDateTime] [datetime] NULL,        -- when the import run finished
[ReturnValue] [int] NULL,             -- dtexec exit code (0 = success)
[ReturnMessage] [varchar](5000) NULL, -- captured dtexec console output
[Status] [varchar](50) NULL           -- 'Success' or 'Failure'
)
-- Staging table loaded by the import_pz.dtsx SSIS package (see
-- dbo.Import_PZ_Execute below).
-- Column names are Chinese pinyin abbreviations; the meanings noted below are
-- inferred from the abbreviations — TODO confirm with the source-system owner.
CREATE TABLE [dbo].[pz_import](
[rq] [date] NULL,            -- presumably: date (riqi)
[zflh] [nvarchar](50) NULL,  -- presumably: payment order number
[yt] [nvarchar](50) NULL,    -- presumably: purpose / usage
[je] [decimal](18, 2) NULL,  -- presumably: amount
[skdwmc] [nvarchar](50) NULL,-- presumably: payee organisation name
[yhzh] [nvarchar](50) NULL,  -- presumably: bank account number
[khyh] [nvarchar](50) NULL,  -- presumably: account-opening bank
[yskmdm] [nvarchar](50) NULL,-- presumably: budget subject code
[gnflmc] [nvarchar](50) NULL,-- presumably: functional classification name
[ysxmdm] [nvarchar](50) NULL,-- presumably: budget item code
[ysxmmc] [nvarchar](50) NULL,-- presumably: budget item name
[jjfldm] [nvarchar](50) NULL,-- presumably: economic classification code
[jjflmc] [nvarchar](50) NULL,-- presumably: economic classification name
[zflxdm] [nvarchar](50) NULL,-- presumably: payment type code
[zflx] [nvarchar](50) NULL,  -- presumably: payment type name
[zjxzdm] [nvarchar](50) NULL,-- presumably: funds nature code
[zjxz] [nvarchar](50) NULL   -- presumably: funds nature name
)
-- Enable xp_cmdshell so the procedures below can shell out to dtexec.
-- SECURITY NOTE(review): xp_cmdshell executes arbitrary OS commands under the
-- SQL Server service account. Keep it disabled except where strictly needed,
-- and restrict EXECUTE permission to trusted principals.
-- To allow advanced options to be changed.
EXEC sp_configure 'show advanced options', 1
GO
-- To update the currently configured value for advanced options.
RECONFIGURE
GO
-- To enable the feature.
EXEC sp_configure 'xp_cmdshell', 1
GO
-- To update the currently configured value for this feature.
RECONFIGURE
GO
-- Executes the import_pz SSIS package with dtexec (through xp_cmdshell),
-- records start/end time, captured console output and status in
-- dbo.Import_PZ_Job_Log, then archives the processed *.txt input files into
-- a timestamped backup folder.
-- NOTE(review): xp_cmdshell must be enabled (see sp_configure above) and the
-- paths below are hard-coded to this server's layout.
CREATE PROCEDURE [dbo].[Import_PZ_Execute] AS
BEGIN
    DECLARE
        @SSISPackageName   varchar(200),  -- package file name (.dtsx)
        @SSISPackageConfig varchar(200),  -- package configuration file (.dtsConfig)
        @SSISPackagePath   varchar(200),  -- folder containing the package
        @BackupPath        varchar(200),  -- root folder for backups
        @BackupFolder      varchar(200),  -- per-run backup subfolder (yyyymmddhhmm)
        @ImportFileFolder  varchar(200),  -- folder holding the *.txt input files
        @StartDateTime     datetime,
        @EndDateTime       datetime,
        @ReturnValue       int,           -- dtexec exit code (0 = success)
        @ReturnMessage     varchar(5000), -- captured dtexec console output
        @Status            varchar(50)

    -- Record the start time before any work is done.
    SET @StartDateTime     = GETDATE()
    SET @ImportFileFolder  = 'D:\SSIS Project\import_pz\files\'
    SET @SSISPackageConfig = 'config.dtsConfig'
    SET @SSISPackagePath   = 'D:\SSIS Project\import_pz\import_pz\'
    SET @SSISPackageName   = 'import_pz.dtsx'
    SET @BackupPath        = 'D:\SSIS Project\import_pz\backup\'

    -- Run the SSIS package and capture its console output line by line.
    DECLARE @Command nvarchar(4000)
    CREATE TABLE #CommandOutput
    (
        [RowNumber] int IDENTITY(1,1),
        [Output] varchar(255) NULL
    )
    SET @Command = 'dtexec /f "' + @SSISPackagePath + @SSISPackageName
                 + '" /conf "' + @SSISPackagePath + @SSISPackageConfig
                 + '" /Reporting "E"'
    -- Include debugging information: echo the command being executed.
    SET @ReturnMessage = 'Executing Command: ' + CAST(@Command as varchar(4000)) + char(13) + char(13)
    INSERT INTO #CommandOutput
    EXECUTE @ReturnValue = master.dbo.xp_cmdshell @Command

    -- Collapse the captured output into a single message, skipping noise.
    SELECT @ReturnMessage = @ReturnMessage + [Output] + CHAR(13)
    FROM #CommandOutput
    WHERE
        RowNumber > 2 AND                                -- skip the first two header lines
        [Output] IS NOT NULL AND
        [Output] NOT LIKE 'DTSRun OnProgress:%' AND      -- exclude DTS progress counters
        REPLACE(REPLACE(LTRIM(RTRIM([Output])), CHAR(13), ''), CHAR(10), '') <> '' -- exclude blank lines
    DROP TABLE #CommandOutput

    -- Update status from the dtexec exit code.
    IF @ReturnValue = 0 SET @Status = 'Success'
    ELSE SET @Status = 'Failure'
    SET @EndDateTime = GETDATE()

    -- BUG FIX: the original INSERT had no column list and supplied five
    -- values (including @ReturnValue) for a four-column target, so it failed
    -- at run time.  Name the columns explicitly and fold the exit code into
    -- the message so no information is lost.
    INSERT INTO Import_PZ_Job_Log (StartDateTime, EndDateTime, ReturnMessage, Status)
    VALUES (@StartDateTime,
            @EndDateTime,
            'dtexec exit code: ' + ISNULL(CAST(@ReturnValue AS varchar(12)), 'NULL') + CHAR(13) + @ReturnMessage,
            @Status)

    -- Archive: create a yyyymmddhhmm backup folder and move the inputs into it.
    SET @BackupFolder = CONVERT(varchar(20), @StartDateTime, 112)
                      + REPLACE(CONVERT(varchar(5), @StartDateTime, 108), ':', '')
    SET @Command = 'mkdir "' + @BackupPath + @BackupFolder + '"'
    EXECUTE master.dbo.xp_cmdshell @Command
    SET @Command = 'move "' + @ImportFileFolder + '*.txt" "' + @BackupPath + @BackupFolder + '"'
    EXECUTE master.dbo.xp_cmdshell @Command
END
|
-- Pops the next operation pending execution from the central BI queue and
-- runs its SSIS package with dtexec via xp_cmdshell, capturing the console
-- output; finally writes start/end time, return code, output and status back
-- to the queue.  Depends on procedures/functions in the BISLCentral
-- BIServiceManagement database, which are not visible in this file.
ALTER PROCEDURE [dbo].[Queue_Jobs_ExecutePendingProcesses] AS
BEGIN
-- Initialise local variables
DECLARE @QueuedOperationID int,
@SSISPackageName varchar(200),
@GlobalVariables varchar(4000),
@StartDateTime datetime,
@EndDateTime datetime,
@ReturnValue int,
@ReturnMessage varchar(5000),
@Status varchar(50),
@SSIServer varchar(50),
@SSIServerName varchar(50),
@QueuedjobID int,
@64Bit int, -- 0 = use the 32-bit dtexec invoker, otherwise 64-bit
@DTEXEC_OPTIONS varchar (255)
-- Record the start time before any work is done
SET @StartDateTime = GETDATE()
-- NOTE(review): EXEC @var = name captures the callee's return value; assigning
-- it into a varchar suggests GetEnvironmentVariable is a scalar UDF invoked
-- via EXEC — confirm against the BISLCentral database.
exec @DTEXEC_OPTIONS= BISLCentral.BIServiceManagement.dbo.GetEnvironmentVariable 'DTEXEC_OPTIONS';
-- Get the next operation pending execution
SET @SSIServer=dbo.GetEnvironmentVariable('BISLThisNodeServer');
EXECUTE BISLCentral.BIServiceManagement.dbo.Queue_Operations_GetNextPendingInQueue @SSIServer,@QueuedOperationID output, @SSISPackageName output,@SSIServerName output,@QueuedjobID output,@64Bit output
-- Nothing queued for this node: exit quietly
if @QueuedOperationID IS null return
-- Resolve the SSIS server (node) name where the SSIS packages are stored
exec @SSIServerName=BISLCentral.BIServiceManagement.dbo.GetEnvironmentVariable @SSIServerName
-- Get the package parameters; LoggingOnly parameters are NOT returned
exec @GlobalVariables = BISLCentral.BIServiceManagement.dbo.GetVariablesStingSSIS @QueuedOperationID
-- Execute the SSIS package
DECLARE @Command nvarchar(4000)
-- Temp table capturing dtexec console output, one row per line
CREATE TABLE #CommandOutput
(
[RowNumber] int IDENTITY(1,1),
[Output] varchar(255) NULL
)
SET @ReturnMessage = ''
-- Choose the 64-bit or 32-bit dtexec invoker path
DECLARE @SSISPATH varchar(50) ='64BitSSIS'
if @64Bit=0
set @SSISPATH='32BitSSIS'
SET @Command = CONVERT(nvarchar(4000), dbo.GetEnvironmentVariable(@SSISPATH)+'DTEXEC /Ser '+@SSIServerName+' /SQ "' + @SSISPackageName + '" ' + @GlobalVariables + ' /CHECKPOINTING OFF /REPORTING '+@DTEXEC_OPTIONS)
-- Include debugging information: echo the command being executed
SET @ReturnMessage = 'Executing Command: ' + CAST(@Command as varchar(4000)) + char(13) + char(13)
INSERT INTO #CommandOutput
EXECUTE @ReturnValue = master.dbo.xp_cmdshell @Command
-- Collapse the captured output into a single message, skipping noise
SELECT @ReturnMessage = @ReturnMessage + [Output] + CHAR(13)
FROM #CommandOutput
WHERE RowNumber > 2 AND -- Ignore the first two lines
[Output] IS NOT NULL AND
[Output] NOT LIKE 'DTSRun OnProgress:%' AND -- Exclude DTS Progress counters
REPLACE(REPLACE(LTRIM(RTRIM([Output])), CHAR(13), ''), CHAR(10), '') NOT LIKE '' -- Exclude blank lines
DROP TABLE #CommandOutput
-- Derive the status from the dtexec exit code (0 = success)
IF @ReturnValue = 0 SET @Status = 'Success'
ELSE SET @Status = 'Failure'
SET @EndDateTime = GETDATE()
-- Report the result back to the central queue
EXEC BISLCentral.BIServiceManagement.dbo.Queue_Operations_Update @QueuedOperationID, @StartDateTime, @EndDateTime, @ReturnValue, @ReturnMessage, @Status
END
|
-- Oracle date-dimension table: one row per calendar day.
-- FIX: the original table had no key at all; DateKey (yyyymmdd) is the
-- natural surrogate key and FullDateAlternateKey must be unique as well.
-- Named constraints make errors and migrations greppable.
CREATE TABLE Dim_Date_toby(
DateKey int NOT NULL,                    -- yyyymmdd integer key
FullDateAlternateKey date NOT NULL,      -- the actual calendar date
DayNumberOfWeek int NOT NULL,            -- NLS-territory-dependent (TO_CHAR 'D')
DayNameOfWeek nvarchar2(10) NOT NULL,    -- abbreviated day name ('DY')
DayNumberOfMonth int NOT NULL,
DayNumberOfYear int NOT NULL,
WeekNumberOfYear int NOT NULL,
MonthName nvarchar2(10) NOT NULL,
MonthNumberOfYear int NOT NULL,
CalendarQuarter int NOT NULL,            -- 1..4
CalendarYear int NOT NULL,
CalendarSemester int NOT NULL,           -- 1 = Q1/Q2, 2 = Q3/Q4
CONSTRAINT dim_date_toby_pk PRIMARY KEY (DateKey),
CONSTRAINT dim_date_toby_date_uq UNIQUE (FullDateAlternateKey)
)
set serveroutput on format wrapped;  -- BUG FIX: "wraped" is not a valid SQL*Plus FORMAT option
-- Populates Dim_Date_toby with the next MAXRECORDS calendar days, starting
-- tomorrow relative to the session's CURRENT_DATE (session time zone).
DECLARE
    MAXRECORDS CONSTANT INT := 10;  -- number of days to generate
    new_date          date;
    datekey           int;
    -- NOTE(review): TO_CHAR 'D' is NLS_TERRITORY-dependent (the week may
    -- start on Sunday or Monday) — confirm the expected convention.
    daynumberofweek   int;
    daynameofweek     nvarchar2(10);
    daynumberofmonth  int;
    daynumberofyear   int;
    weeknumberofyear  int;
    monthname         nvarchar2(10);
    monthnumberofyear int;
    calendarquarter   int;
    calendaryear      int;
    calendarsemester  int;
BEGIN
    FOR i IN 1 .. MAXRECORDS LOOP
        new_date := current_date + i;
        datekey           := to_number(TO_CHAR(new_date, 'YYYYMMDD'));
        daynumberofweek   := to_number(TO_CHAR(new_date, 'D'));
        daynameofweek     := TO_CHAR(new_date, 'DY');
        daynumberofmonth  := to_number(TO_CHAR(new_date, 'DD'));
        daynumberofyear   := to_number(TO_CHAR(new_date, 'DDD'));
        weeknumberofyear  := to_number(TO_CHAR(new_date, 'WW'));
        -- BUG FIX: plain 'MONTH' blank-pads every name to the longest month
        -- (e.g. 'MAY      '), and the padding would be stored in the table;
        -- the FM modifier suppresses it.
        monthname         := TO_CHAR(new_date, 'FMMONTH');
        monthnumberofyear := to_number(TO_CHAR(new_date, 'MM'));
        calendarquarter   := to_number(TO_CHAR(new_date, 'Q'));
        calendaryear      := to_number(TO_CHAR(new_date, 'YYYY'));
        -- Semester 1 = Q1/Q2, semester 2 = Q3/Q4.
        IF calendarquarter IN (1, 2) THEN
            calendarsemester := 1;
        ELSE
            calendarsemester := 2;
        END IF;
        -- Explicit column list so the INSERT survives schema evolution.
        INSERT INTO Dim_Date_toby (
            DateKey, FullDateAlternateKey, DayNumberOfWeek, DayNameOfWeek,
            DayNumberOfMonth, DayNumberOfYear, WeekNumberOfYear, MonthName,
            MonthNumberOfYear, CalendarQuarter, CalendarYear, CalendarSemester)
        VALUES (
            datekey, new_date, daynumberofweek, daynameofweek,
            daynumberofmonth, daynumberofyear, weeknumberofyear, monthname,
            monthnumberofyear, calendarquarter, calendaryear, calendarsemester);
    END LOOP;
    dbms_output.put_line('done!');
    COMMIT;
END;
|
package com.infosys.cvs;
import java.io.IOException;
import java.io.InputStream;
import java.io.UnsupportedEncodingException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map.Entry;
import java.util.Properties;
import javax.swing.text.html.HTMLDocument.Iterator;
public class ReadFile {

	// Key/value pairs loaded from the properties resource.
	Properties p;

	/**
	 * Loads the given properties resource from the classpath.
	 *
	 * @param filePath classpath-relative resource name (e.g. "DB Connection.TXT")
	 * @throws IOException if the resource does not exist or cannot be read
	 */
	public ReadFile(String filePath) throws IOException {
		InputStream in = ReadFile.class.getClassLoader().getResourceAsStream(
				filePath);
		// BUG FIX: getResourceAsStream returns null when the resource is
		// missing; the original then threw NullPointerException from
		// Properties.load.  Fail fast with a descriptive IOException instead.
		if (in == null) {
			throw new IOException("Resource not found on classpath: " + filePath);
		}
		p = new Properties();
		try {
			p.load(in);
		} finally {
			// BUG FIX: the original never closed the stream (resource leak).
			in.close();
		}
	}

	/**
	 * Returns the value for the given key, re-decoded as UTF-8.
	 *
	 * @param columnName property key; may be null
	 * @return the decoded value, or null if the key is null or absent
	 */
	public String getColumnDefault(String columnName) {
		if (columnName == null) {
			return null;
		}
		String value = p.getProperty(columnName);
		if (value == null) {
			// BUG FIX: the original called value.getBytes(...) on a null
			// value for unknown keys, throwing NullPointerException.
			return null;
		}
		try {
			// Properties.load decodes the file as ISO-8859-1; re-encode the raw
			// bytes and decode them as UTF-8 so non-Latin values survive
			// (common workaround for properties files saved as UTF-8).
			return new String(value.getBytes("ISO8859-1"), "UTF-8");
		} catch (UnsupportedEncodingException e) {
			// Both charsets are mandatory on every JVM, so this cannot happen.
			e.printStackTrace();
			return null;
		}
	}

	/**
	 * Returns all loaded entries as a list.
	 *
	 * @return a new list containing every key/value entry
	 */
	public List<Entry<Object, Object>> iterator() {
		List<Entry<Object, Object>> list = new ArrayList<Entry<Object, Object>>();
		for (Entry<Object, Object> entry : p.entrySet()) {
			list.add(entry);
		}
		return list;
	}

	/** Small demo: loads the connection file and prints the user name. */
	public static void main(String[] args) throws IOException {
		String filePath = "DB Connection.TXT";
		ReadFile config = new ReadFile(filePath);
		String DB_URL = config.getColumnDefault("url");
		String DB_USERNAME = config.getColumnDefault("user");
		String DB_PASSWORD = config.getColumnDefault("password");
		System.out.println(DB_USERNAME);
	}
}
|
package org.apache.hadoop.examples;
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.List;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
public class HDFSTest {
// Creates a new file at the given HDFS URI and writes the given string into it.
public static void WriteToHDFS(String file, String words) throws IOException, URISyntaxException
{
Configuration conf = new Configuration();
FileSystem fs = FileSystem.get(URI.create(file), conf);
Path path = new Path(file);
FSDataOutputStream out = fs.create(path); // create the file
// All three write calls below work; the latter two are the common choices.
//out.writeBytes(words);
//out.writeUTF(words);
out.write(words.getBytes("UTF-8"));
out.close();
// NOTE(review): 'out' is not closed on an exception path and 'fs' is never
// closed here (FileSystem.get may return a cached instance) — confirm intent.
// To write from an input stream instead (e.g. copying an existing file's
// contents, opened as the input stream), use IOUtils.copyBytes as shown:
//FSDataInputStream in = fs.open(new Path(args[0]));
//IOUtils.copyBytes(in, out, 4096, true) // 4096 = copy buffer size; true = close streams when done
}
// Reads the file at the given HDFS URI and streams its contents to stdout.
public static void ReadFromHDFS(String file) throws IOException
{
Configuration conf = new Configuration();
FileSystem fs = FileSystem.get(URI.create(file), conf);
Path path = new Path(file);
FSDataInputStream in = fs.open(path);
IOUtils.copyBytes(in, System.out, 4096, true); // true = close 'in' when the copy finishes
// Alternatively, FSDataInputStream.read can load the contents into a byte
// array and return it, as in the commented example:
/**
* FileStatus stat = fs.getFileStatus(path);
// create the buffer
byte[] buffer = new byte[Integer.parseInt(String.valueOf(stat.getLen()))];
is.readFully(0, buffer);
is.close();
fs.close();
return buffer;
*/
}
// Deletes the file (or, recursively, the directory) at the given HDFS URI.
public static void DeleteHDFSFile(String file) throws IOException
{
Configuration conf = new Configuration();
FileSystem fs = FileSystem.get(URI.create(file), conf);
Path path = new Path(file);
// The FileSystem delete API has several variants: deleteOnExit removes the
// path when the JVM exits; the call below deletes immediately, recursively
// when the path is a directory (second argument = true).
fs.delete(path,true);
fs.close();
}
// Uploads a local file to the given HDFS destination path.
public static void UploadLocalFileHDFS(String src, String dst) throws IOException
{
Configuration conf = new Configuration();
FileSystem fs = FileSystem.get(URI.create(dst), conf);
Path pathDst = new Path(dst);
Path pathSrc = new Path(src);
fs.copyFromLocalFile(pathSrc, pathDst);
fs.close();
}
// Prints the path (and owner) of every entry directly under the given HDFS directory.
public static void ListDirAll(String DirFile) throws IOException
{
Configuration conf = new Configuration();
FileSystem fs = FileSystem.get(URI.create(DirFile), conf);
Path path = new Path(DirFile);
FileStatus[] status = fs.listStatus(path);
// Approach 1: iterate the FileStatus array directly
for(FileStatus f: status)
{
System.out.println(f.getPath().toString());
System.out.println(f.getOwner().toString());
}
// Approach 2: convert the statuses to a Path[] first
Path[] listedPaths = FileUtil.stat2Paths(status);
for (Path p : listedPaths){
System.out.println(p.toString());
}
}
// Demo driver: only ReadFromHDFS is active; the other calls are left
// commented out as usage examples.
public static void main(String [] args) throws IOException, URISyntaxException
{
// List every file under a directory:
//ListDirAll("hdfs://10.221.62.68:9000/home/oracli/hadoop/workspace/input");
String fileWrite = "hdfs://10.221.62.68:9000/home/oracli/hadoop/workspace/input/CN.txt";
String words="邱俊超 This words is to write into file!\n";
//WriteToHDFS(fileWrite, words);
// Read fileWrite's contents and print them to the terminal
ReadFromHDFS(fileWrite);
// Delete the fileWrite file created above
//DeleteHDFSFile(fileWrite);
// Given a local file, upload it to HDFS
//String LocalFile = "D://CN.txt";
//UploadLocalFileHDFS(LocalFile, fileWrite);
}
}
|