以下脚本不一定能成功运行:
1.Python脚本, 需要pysftp包
#!/usr/bin/python
# -*- coding: UTF-8 -*-
"""Upload a local file to a remote host over SFTP, fetch a copy back,
then push the remote path into HDFS via the local `hadoop` CLI."""
import pysftp
import os
import subprocess

# NOTE(security): hostkeys=None disables host-key verification, which
# permits man-in-the-middle attacks; in production load a known_hosts
# file instead.
cnopts1 = pysftp.CnOpts()
cnopts1.hostkeys = None

# Raw string for the Windows path: the original 'c:\pysftptest.py' only
# worked because '\p' happens not to be a recognized escape sequence —
# it breaks silently if the filename ever starts with n, t, r, etc.
local_path = r'c:\pysftptest.py'
remote_path = '/home/abc/pytest.py'

with pysftp.Connection(host='123.123.123.123', username='abc',
                       password='123456', cnopts=cnopts1) as sftp:
    sftp.put(local_path, remote_path)   # upload local file
    sftp.get(remote_path)               # download a copy into the CWD

# Argument list with shell=False (the default) instead of os.system with
# string concatenation: no shell parsing, so a path containing spaces or
# shell metacharacters cannot break or inject into the command.
subprocess.run(['hadoop', 'fs', '-put', remote_path, '/user/hadoop/'])
2. Python脚本,需要paramiko包
import paramiko

# The Transport holds a live TCP connection; the original never closed
# it (or the SFTPClient), leaking the socket. try/finally guarantees
# cleanup even if connect/from_transport raises.
t = paramiko.Transport(('123.123.123.123', 22))
try:
    t.connect(username='abc', password='123456')
    sftp = paramiko.SFTPClient.from_transport(t)  # or: t.open_sftp_client()
    # sftp.get(remote_path, local_path)
    # sftp.put(local_path, remote_path)
finally:
    t.close()  # closing the transport also tears down the SFTP channel
3.shell 脚本
#!/bin/bash
#!/bin/sh
echo “OK, starting now…”
cd /home/ExtractedData
pwd
sftp -b /dev/stdin user@ip_address <<EOF
cd Desktop/my_folder
pwd
get filename.txt
!sleep 30
!pwd
!hadoop fs -put /home/username/Ext