Check the Python interpreter path and Python version:
python -c "import sys; print('Python path:', sys.executable); print('Python version:', sys.version)"
Check the torch and CUDA versions:
python -c "import torch; print('torch version:', torch.__version__, '| cuda available:', torch.cuda.is_available(), '| cuda version:', torch.version.cuda if torch.cuda.is_available() else 'not installed')"
Create a directory: os.makedirs(output, exist_ok=True) (with exist_ok=True no error is raised if the directory already exists).
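A minimal sketch (the output path below is a hypothetical placeholder): os.makedirs also creates any missing intermediate directories.
import os
output = 'outputs/exp1'              # hypothetical path
os.makedirs(output, exist_ok=True)   # creates parent dirs as needed; no error if it already exists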
Check where a library is installed: when a package is installed a second time, pip skips the installation and automatically prints the path where it is already installed:
pip install lmdb -i https://mirrors.ivolces.com/pypi/simple/
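Two other standard ways to find the install path (not from the original, but plain pip / Python behavior):
pip show lmdb                                    # the "Location:" field is the install directory
python -c "import lmdb; print(lmdb.__file__)"    # path of the imported module itself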
CUDA environment variables
# In Python, set before torch initializes CUDA:
os.environ['CUDA_VISIBLE_DEVICES'] = '2'
# Or in the shell, before launching the script:
export CUDA_VISIBLE_DEVICES=2
# Pick a specific visible GPU and move the model onto it:
device = "cuda:1"
model = model.to(device)
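A minimal sketch (the GPU index is an assumption): with CUDA_VISIBLE_DEVICES='2', only physical GPU 2 is visible and it is addressed as cuda:0 inside the process, so a CPU fallback like this is common.
import os
os.environ['CUDA_VISIBLE_DEVICES'] = '2'   # physical GPU 2; it appears as cuda:0 in this process
import torch
device = torch.device('cuda:0' if torch.cuda.is_available() else 'cpu')
# model = model.to(device)                 # move the model once the device is chosen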
Common VSCode development boilerplate:
# coding=utf-8
import sys
import os

# Make the script independent of the caller's working directory.
current_dir = os.path.dirname(os.path.abspath(__file__))
os.chdir(current_dir)
print('current_dir', current_dir)

# Put the script directory, its parent, and ./src on the import path.
paths = [current_dir, current_dir + '/../']
paths.append(os.path.join(current_dir, 'src'))
for path in paths:
    sys.path.insert(0, path)
    os.environ['PYTHONPATH'] = (os.environ.get('PYTHONPATH', '') + ':' + path).strip(':')

import cv2
import torch
Common python -c one-liner with a for loop:
python -c "import sys, os; paths = ['/shared_disk/users/lbg/project/4d/plf/libs', 'your_path_2']; [sys.path.insert(0, path) or os.environ.update({'PYTHONPATH': (os.environ.get('PYTHONPATH', '') + ':' + path).strip(':')}) for path in paths];import posepile; print(posepile.__path__[0])"
Receiving command-line arguments:
import argparse

FLAGS = argparse.Namespace()

def initialize():
    parser = argparse.ArgumentParser()
    parser.add_argument('--input-model-path', type=str, default='/shared_disk/datasets/public_datasets/human_pose/experiments/finetune_0517')
    parser.add_argument('--config-name', type=str, default='nlf_s')
    # parser.parse_args(namespace=FLAGS)   # strict version: unknown flags raise an error
    # Known flags are written onto the shared FLAGS namespace; unknown ones are returned separately.
    args, unknown = parser.parse_known_args(namespace=FLAGS)
Parsing unknown arguments:
args, unknown_args = parser.parse_known_args()
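A minimal usage sketch (the flag values are hypothetical): after initialize() runs, the parsed options are attributes on the module-level FLAGS, and anything unrecognized stays in the unknown list.
initialize()
print(FLAGS.config_name)        # 'nlf_s' unless overridden with --config-name
print(FLAGS.input_model_path)   # dashes in flag names become underscores on the namespace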
Shell: cd into the script's own directory:
cd "$(dirname "$0")"
cd "$(dirname "$(realpath "$0")")"
OpenCV debug visualization:
import cv2; cv2.imshow("res", inf_cfg.mask_crop); cv2.waitKey()
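On a headless server with no display for imshow, a common fallback (not from the original) is to write the image to disk instead; inf_cfg.mask_crop is the same array as in the snippet above.
import cv2
cv2.imwrite("debug_mask.png", inf_cfg.mask_crop)   # inspect the dumped file instead of a window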
Test Python code from the command line:
python -c "import nvdiffrast.torch as dr; print(dr)"
try/except example:
try:
    print(int('ss'))                      # raises ValueError
except Exception as e:
    print('the code raised an error:', e)
else:
    print("no error was raised")
finally:
    print("this always runs, no matter what happens")