Go Integration
Go's standard library has first-class HTTP proxy support via the CONNECT method. For SOCKS5 use golang.org/x/net/proxy. Both approaches work with NinjasProxy residential, datacenter, and mobile endpoints.
net/http — HTTP CONNECT Proxy
package main

import (
	"encoding/json"
	"fmt"
	"net/http"
	"net/url"
	"time"
)

// main routes a single HTTPS request through the NinjasProxy HTTP CONNECT
// endpoint and prints the exit IP reported by api.ipify.org.
//
// NOTE: the original snippet imported "context" without using it, which is
// a compile error in Go; the unused import has been removed.
func main() {
	// Credentials travel in the URL userinfo; net/http sends them as
	// Proxy-Authorization during the CONNECT handshake.
	proxyURL := &url.URL{
		Scheme: "http",
		User:   url.UserPassword("USERNAME", "API_KEY"),
		Host:   "r.ninjasproxy.com:8080",
	}
	transport := &http.Transport{
		Proxy: http.ProxyURL(proxyURL),
	}
	client := &http.Client{
		Transport: transport,
		Timeout:   30 * time.Second, // bounds the whole request, including dial
	}

	resp, err := client.Get("https://api.ipify.org?format=json")
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	var result map[string]string
	if err := json.NewDecoder(resp.Body).Decode(&result); err != nil {
		panic(err)
	}
	fmt.Println("Exit IP:", result["ip"])
}
Geo-targeting with username suffix
// buildProxyURL assembles an HTTP proxy URL whose username carries optional
// geo-targeting and sticky-session suffixes ("-country-XX", "-session-ID"),
// as understood by the NinjasProxy gateway.
func buildProxyURL(username, apiKey, host string, port int, country, sessionID string) *url.URL {
	credentials := username
	if country != "" {
		credentials = fmt.Sprintf("%s-country-%s", credentials, country)
	}
	if sessionID != "" {
		credentials = fmt.Sprintf("%s-session-%s", credentials, sessionID)
	}
	return &url.URL{
		Scheme: "http",
		User:   url.UserPassword(credentials, apiKey),
		Host:   fmt.Sprintf("%s:%d", host, port),
	}
}
// US sticky session
proxyURL := buildProxyURL("myuser", "mykey", "r.ninjasproxy.com", 8080, "US", "run01")

SOCKS5 — golang.org/x/net/proxy
// go get golang.org/x/net/proxy
package main
import (
"context"
"fmt"
"net"
"net/http"
"time"
"golang.org/x/net/proxy"
)
func main() {
auth := &proxy.Auth{
User: "USERNAME",
Password: "API_KEY",
}
dialer, err := proxy.SOCKS5("tcp", "r.ninjasproxy.com:1080", auth, proxy.Direct)
if err != nil {
panic(err)
}
transport := &http.Transport{
DialContext: func(ctx context.Context, network, addr string) (net.Conn, error) {
return dialer.Dial(network, addr)
},
}
client := &http.Client{
Transport: transport,
Timeout: 30 * time.Second,
}
resp, err := client.Get("https://api.ipify.org?format=json")
if err != nil {
panic(err)
}
defer resp.Body.Close()
fmt.Println("Status:", resp.Status)
}
Colly Web Scraping Framework
// go get github.com/gocolly/colly/v2
package main
import (
"fmt"
"net/http"
"net/url"
"github.com/gocolly/colly/v2"
)
func main() {
proxyURL := &url.URL{
Scheme: "http",
User: url.UserPassword("USERNAME", "API_KEY"),
Host: "r.ninjasproxy.com:8080",
}
c := colly.NewCollector(
colly.AllowedDomains("example.com"),
colly.UserAgent("Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36"),
)
// Set proxy transport
c.WithTransport(&http.Transport{
Proxy: http.ProxyURL(proxyURL),
})
c.OnHTML("title", func(e *colly.HTMLElement) {
fmt.Println("Title:", e.Text)
})
c.OnError(func(r *colly.Response, err error) {
fmt.Printf("Error on %s: %v (status %d)\n", r.Request.URL, err, r.StatusCode)
})
if err := c.Visit("https://example.com"); err != nil {
panic(err)
}
}
Goroutine Pool — Concurrent Scraping
package main
import (
"context"
"encoding/json"
"fmt"
"net/http"
"net/url"
"sync"
"time"
)
// Result is the outcome of fetching one URL: either Err is set, or Status
// and (for JSON bodies carrying an "ip" field) IP are populated.
type Result struct {
	URL    string // the URL that was requested
	IP     string // "ip" field decoded from the JSON response body, if any
	Status int    // HTTP status code of the response
	Err    error  // request/transport error; other fields are zero when set
}
// scrapeWorker drains the jobs channel, fetching each URL through the shared
// client and publishing the outcome on results, until the jobs channel is
// closed or the context is cancelled.
func scrapeWorker(
	ctx context.Context,
	client *http.Client,
	jobs <-chan string,
	results chan<- Result,
	wg *sync.WaitGroup,
) {
	defer wg.Done()
	for {
		select {
		case <-ctx.Done():
			// Shutdown requested; abandon any remaining jobs.
			return
		case rawURL, open := <-jobs:
			if !open {
				return // channel closed: no more work
			}
			results <- fetchURL(ctx, client, rawURL)
		}
	}
}
// fetchURL performs one GET through the shared proxy client and reports the
// exit IP parsed from a JSON body of the form {"ip": "..."}.
func fetchURL(ctx context.Context, client *http.Client, rawURL string) Result {
	req, err := http.NewRequestWithContext(ctx, http.MethodGet, rawURL, nil)
	if err != nil {
		return Result{URL: rawURL, Err: err}
	}

	resp, err := client.Do(req)
	if err != nil {
		return Result{URL: rawURL, Err: err}
	}
	defer resp.Body.Close()

	// Best-effort decode: a non-JSON body simply leaves IP empty.
	payload := map[string]string{}
	_ = json.NewDecoder(resp.Body).Decode(&payload)

	return Result{URL: rawURL, IP: payload["ip"], Status: resp.StatusCode}
}
// main fans 20 probe requests out over a fixed pool of 10 workers, all
// sharing one proxied HTTP client, and prints each result as it arrives.
func main() {
	// Authenticated residential HTTP proxy endpoint.
	gateway := &url.URL{
		Scheme: "http",
		User:   url.UserPassword("USERNAME", "API_KEY"),
		Host:   "r.ninjasproxy.com:8080",
	}
	client := &http.Client{
		Transport: &http.Transport{Proxy: http.ProxyURL(gateway)},
		Timeout:   30 * time.Second,
	}

	// Twenty identical probe URLs — each should report the proxy exit IP.
	targets := make([]string, 20)
	for i := range targets {
		targets[i] = "https://api.ipify.org?format=json"
	}

	const workers = 10
	jobs := make(chan string, len(targets))
	results := make(chan Result, len(targets))

	ctx, cancel := context.WithTimeout(context.Background(), 60*time.Second)
	defer cancel()

	var wg sync.WaitGroup
	for w := 0; w < workers; w++ {
		wg.Add(1)
		go scrapeWorker(ctx, client, jobs, results, &wg)
	}

	for _, target := range targets {
		jobs <- target
	}
	close(jobs) // lets workers exit once the queue drains

	// Close results after every worker finishes so the range below ends.
	go func() {
		wg.Wait()
		close(results)
	}()

	for r := range results {
		if r.Err != nil {
			fmt.Printf("ERROR %s: %v\n", r.URL, r.Err)
		} else {
			fmt.Printf("OK %s → IP %s (HTTP %d)\n", r.URL, r.IP, r.Status)
		}
	}
}
Context + Timeout Handling
package main
import (
"context"
"fmt"
"net/http"
"net/url"
"time"
)
// fetchWithTimeout issues one GET for rawURL through proxyURL, bounded by a
// per-request context deadline, and prints the resulting status code.
// It returns a wrapped error if the request cannot be built or fails.
func fetchWithTimeout(rawURL string, proxyURL *url.URL, timeout time.Duration) error {
	ctx, cancel := context.WithTimeout(context.Background(), timeout)
	defer cancel()

	// The context deadline bounds this call, so no client-wide Timeout is set.
	httpClient := &http.Client{
		Transport: &http.Transport{Proxy: http.ProxyURL(proxyURL)},
	}

	request, err := http.NewRequestWithContext(ctx, http.MethodGet, rawURL, nil)
	if err != nil {
		return fmt.Errorf("build request: %w", err)
	}

	response, err := httpClient.Do(request)
	if err != nil {
		// On timeout the wrapped error matches context.DeadlineExceeded.
		return fmt.Errorf("fetch %s: %w", rawURL, err)
	}
	defer response.Body.Close()

	fmt.Printf("Status %d for %s\n", response.StatusCode, rawURL)
	return nil
}
Always set a timeout via context.WithTimeout or http.Client.Timeout. Without one, a slow or unresponsive target can hang a goroutine indefinitely.

Next Steps
- Node.js integration — axios, Puppeteer, TypeScript
- Python integration — httpx async, Playwright
- Rate limits — concurrency budgeting
- API Reference — REST endpoints for session management