MambaVision installation

Ref: https://github.com/NVlabs/MambaVision/issues/86

- Docker image: pytorch/pytorch:2.5.1-cuda12.4-cudnn9-devel
- pip install causal-conv1d==1.5.0.post5
- wget https://github.com/state-spaces/mamba/releases/download/v2.2.3.post2/mamba_ssm-2.2.3.post2+cu12torch2.5cxx11abiFALSE-cp311-cp311-linux_x86_64.whl
- pip install mamba_ssm-2.2.3.post2+cu12torch2.5cxx11abiFALSE-cp311-cp311-linux_x86_64.whl
- git clone https://github.com/NVlabs/MambaVision.git
- cd MambaVision
- In setup.py line 40, replace "mamba-ssm==2.2.4" with "mamba-ssm"
- pip install -e .
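
A minimal post-install sanity check (a sketch; it assumes the import names match the pip package names, which holds for both wheels above):

import torch
import causal_conv1d  # fails here if the wheel doesn't match the torch/CUDA build
import mamba_ssm      # same for the mamba_ssm wheel installed above

print(torch.__version__, torch.version.cuda)  # expect 2.5.1 / 12.4 from the docker image
print(torch.cuda.is_available())              # the compiled CUDA kernels need this to be True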

Ubuntu CLI - check MAC address & change network config

1-1) ip -br link show up
1-2) ifconfig
1-3) nmcli con show

2-1) sudo nmcli con mod "Wired connection X" ipv4.addresses 000.000.000.000/YY
     X: connection number shown by nmcli con show
     YY: subnet prefix length
         8: 255.0.0.0 (large networks)
         16: 255.255.0.0 (medium networks)
         24: 255.255.255.0 (home or small networks)
         30: 255.255.255.252 (point-to-point links)
2-2) sudo nmcli con mod "Wired connection X" ipv4.gateway 000.000.0.0
2-3) sudo nmcli con mod "Wired connection X" ipv4.dns "0.0.0.0 0.0.0.0"
2-4) sudo nmcli con mod "Wired connection X" ipv4.method manual
2-5) sudo nmcli con mod "Wired connection X" ipv4.dhcp-client-id ""
2-6) sudo nmcli con down "Wired connection X" && sudo nmcli con up "Wired connection X"
     (a worked example with concrete values follows below)

3-1) sudo nano /etc/netplan/01-network-manager-all.yaml
     network:
       version: 2
       renderer: networkd
       ethernet...
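
Worked example for steps 2-1 to 2-6, assuming a typical home LAN (192.168.0.x/24, gateway 192.168.0.1, Google DNS) and a connection named "Wired connection 1"; all addresses here are illustrative, substitute your own:

 sudo nmcli con mod "Wired connection 1" ipv4.addresses 192.168.0.10/24
 sudo nmcli con mod "Wired connection 1" ipv4.gateway 192.168.0.1
 sudo nmcli con mod "Wired connection 1" ipv4.dns "8.8.8.8 8.8.4.4"
 sudo nmcli con mod "Wired connection 1" ipv4.method manual
 sudo nmcli con down "Wired connection 1" && sudo nmcli con up "Wired connection 1"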

Deformable DETR make + PyTorch docker -> cusolverDn.h: No such file or directory

 export PATH=/usr/local/cuda-11.6/bin/:$PATH
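
If the header still isn't found, check where PyTorch's extension builder thinks the toolkit lives; CUDA_HOME below is the attribute torch.utils.cpp_extension actually exposes:

from torch.utils.cpp_extension import CUDA_HOME
print(CUDA_HOME)  # should print /usr/local/cuda-11.6 (or your toolkit root); None means no toolkit was found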

sklearn t-SNE + matplotlib scatter

import os
import torch
import clip
import matplotlib.pyplot as plt
from PIL import Image
from sklearn.manifold import TSNE
from tqdm import tqdm
from collections import defaultdict
from random import shuffle

# X: list of per-group feature tensors (see the sketch below for one way to build it)
# zs: one group's embeddings; all 7 groups are assumed to have the same length
X = torch.cat(X, dim=0)
X_emb = TSNE(init="pca", perplexity=30.0).fit_transform(X.cpu())
labels = ["source",
          "z_star/foggy-1", "z_star/foggy-2", "z_star/foggy-3",
          "z_bar/foggy-1", "z_bar/foggy-2", "z_bar/foggy-3"]
fig, ax = plt.subplots(1)
group_len = len(zs)
for i in range(7):
    ax.scatter(X_emb[i * group_len:(i + 1) * group_len, 0],
               X_emb[i * group_len:(i + 1) * group_len, 1],
               label=labels[i], s=4)
ax.legend()
fig.savefig("temp.png")
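
For context, a sketch of how the X list above might be produced with the imported CLIP model. The folder names and the ViT-B/32 checkpoint are assumptions, not part of the original note, and only four groups are shown where the plot above uses seven:

import os
import torch
import clip
from PIL import Image
from tqdm import tqdm

device = "cuda" if torch.cuda.is_available() else "cpu"
model, preprocess = clip.load("ViT-B/32", device=device)

# one (N, D) feature tensor per scatter group; folder names are hypothetical
X = []
for folder in ["source", "foggy-1", "foggy-2", "foggy-3"]:
    feats = []
    for fname in tqdm(sorted(os.listdir(folder))):
        img = preprocess(Image.open(os.path.join(folder, fname))).unsqueeze(0).to(device)
        with torch.no_grad():
            feats.append(model.encode_image(img).float().cpu())
    X.append(torch.cat(feats, dim=0))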

Load BERT pretrained weights into DETR encoder

# Example python script of loading BERT-base model to DETR

# Create DETR
import argparse
from models import build_model
from util.default_args import get_args_parser

parser = argparse.ArgumentParser(parents=[get_args_parser()])
args = parser.parse_known_args()[0]
args.model = "detr"
args.hidden_dim = 768        # match BERT-base hidden size
args.dim_feedforward = 3072  # match BERT-base FFN size
args.lr = 1e-4
args.lr_backbone = 1e-5
args.num_queries = 100
args.enc_layers = 12         # match BERT-base depth
args.nheads = 12             # match BERT-base attention heads
model, criterion, postprocessors = build_model(args)

# Load BERT
import torch
bert = torch.hub.load('huggingface/pytorch-transformers', 'model', 'bert-base-uncased')
bert_enc = bert.encoder.state_dict()

# Convert keys: DETR's MultiheadAttention packs Q/K/V into a single in_proj tensor,
# so the three BERT projection matrices are concatenated along dim 0
dict_bert2detr = {}
for i in range(args.enc_layers):
    key = "layers.{}.self_attn.in_proj_weight".format(i)
    dict_bert2detr[key] = torch.cat([
        bert_enc["layer.{}.attention.self.query.weight".format(i)],
        bert_enc["layer.{}.attention.self.key.weight".format(i)],
        bert_enc["layer.{}.attention.self.value.weight".format(i)],
    ], dim=0)
    # ... (the original note is truncated here; biases and the remaining
    # encoder weights are mapped the same way)
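
Once the mapping dict is complete, it can be loaded non-strictly. This is a sketch assuming the stock facebookresearch/detr module layout, where the encoder stack lives at model.transformer.encoder; strict=False tolerates any keys the partial mapping above didn't cover:

missing, unexpected = model.transformer.encoder.load_state_dict(dict_bert2detr, strict=False)
print("missing:", missing)        # encoder params the mapping didn't provide
print("unexpected:", unexpected)  # mapped keys that don't exist in DETR (should be empty)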

matplotlib.pyplot non-interactive backend

 export MPLBACKEND=agg
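
The same thing can be done per-script instead of via the environment variable; matplotlib.use must run before pyplot is imported:

import matplotlib
matplotlib.use("Agg")  # select the non-interactive Agg backend
import matplotlib.pyplot as plt

plt.plot([1, 2, 3])
plt.savefig("out.png")  # renders off-screen, e.g. over SSH or in a container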

VS Code - set the working directory relative to the file being run

 "cwd": "${fileDirname}"