-- Ship the blacklist file from HDFS into each task's working directory under
-- the symlink name 'blacklist' via the MapReduce distributed cache
-- (presumably consumed by the IsUseragentBot UDF — verify against the jar).
SET mapreduce.job.cache.files 'hdfs://192.168.100.163:9000/user/blacklist.txt#blacklist';

-- Register the jar that provides the custom bot-detection filter UDF.
-- (Fix: the original line was missing the terminating semicolon.)
REGISTER ExtractAndTransform.jar;

-- Load the raw web logs from HDFS with an explicit schema.
-- NOTE(review): default PigStorage splits on tabs — file name suggests TSV; confirm.
all_weblogs = LOAD 'hdfs://192.168.100.163:9000/user/apache_tsv.txt'
    AS (ip:chararray, timestamp:long, page:chararray,
        http_status:int, payload_size:int, useragent:chararray);

-- Keep only records whose user agent is NOT classified as a bot by the UDF.
nobots_weblogs = FILTER all_weblogs BY NOT com.sn.hadoop.extract.pig.IsUseragentBot(useragent);

-- Write the filtered (human-traffic) records back to HDFS.
STORE nobots_weblogs INTO 'hdfs://192.168.100.163:9000/user/nobots_weblogs_another';
-- Note 1: all paths in this script refer to HDFS locations.
-- Note 2: the '=' in a relation assignment should have a space on both sides.