1. Decorators

Functions that modify other functions. Syntactic sugar for wrapping a function.

1.1 Basic Decorator

def my_decorator(func):
    def wrapper(*args, **kwargs):
        print("Something is happening before the function is called.")
        result = func(*args, **kwargs)
        print("Something is happening after the function is called.")
        return result
    return wrapper

@my_decorator
def say_hello(name):
    print(f"Hello, {name}!")

say_hello("Alice")

1.2 Decorator with Arguments

def repeat(num_times):
    def decorator_repeat(func):
        def wrapper(*args, **kwargs):
            for _ in range(num_times):
                func(*args, **kwargs)
        return wrapper
    return decorator_repeat

@repeat(num_times=3)
def greet(name):
    print(f"Hi {name}")

greet("Bob")

2. Generators & Iterators

Memory-efficient way to handle large sequences.

2.1 Generators

Use the yield keyword. Generators pause and resume execution and create iterators.

def fibonacci_generator(n):
    a, b = 0, 1
    for _ in range(n):
        yield a
        a, b = b, a + b

for num in fibonacci_generator(5):
    print(num)  # 0, 1, 1, 2, 3

2.2 Iterators

Implement the __iter__() and __next__() methods: __iter__() returns self, and __next__() returns the next item or raises StopIteration.

class MyIterator:
    def __init__(self, limit):
        self.limit = limit
        self.current = 0

    def __iter__(self):
        return self

    def __next__(self):
        if self.current < self.limit:
            value = self.current
            self.current += 1
            return value
        raise StopIteration

for i in MyIterator(3):
    print(i)  # 0, 1, 2

3. Context Managers

Simplify resource management (e.g., file handling, locks).

3.1 Using the with statement

with open("my_file.txt", "w") as f:
    f.write("Hello, world!")
# File is automatically closed here

3.2 Implementing a Context Manager

- Class-based: implement __enter__() and __exit__().
- Function-based: use the @contextlib.contextmanager decorator.

import contextlib

class MyContext:
    def __enter__(self):
        print("Entering context")
        return self  # Value bound to 'as' variable

    def __exit__(self, exc_type, exc_val, exc_tb):
        print("Exiting context")
        # Handle exceptions if needed; return True to suppress them
        return False

with MyContext() as mc:
    print("Inside context")

@contextlib.contextmanager
def my_context_manager():
    print("Entering function context")
    try:
        yield 123  # Value bound to 'as' variable
    finally:
        print("Exiting function context")

with my_context_manager() as val:
    print(f"F-Context: {val}")

4. Metaclasses

Classes that create classes. They control the class creation process. The default metaclass is type. Define __new__ and __init__ methods in the metaclass.

class MyMeta(type):
    def __new__(mcs, name, bases, attrs):
        print(f"Creating class {name}")
        attrs['added_attribute'] = 100
        return super().__new__(mcs, name, bases, attrs)

class MyClass(metaclass=MyMeta):
    def __init__(self, value):
        self.value = value

obj = MyClass(10)
print(obj.added_attribute)  # 100

5. Descriptors

Objects that implement the __get__, __set__, or __delete__ methods. They enable custom behavior for attribute access on an object.

class MyDescriptor:
    def __init__(self, default_value):
        self.default_value = default_value
        self.data = {}

    def __get__(self, instance, owner):
        if instance is None:
            return self
        return self.data.get(instance, self.default_value)

    def __set__(self, instance, value):
        self.data[instance] = value

    def __delete__(self, instance):
        if instance in self.data:
            del self.data[instance]

class MyClassWithDescriptor:
    attribute = MyDescriptor(0)

obj1 = MyClassWithDescriptor()
obj2 = MyClassWithDescriptor()
print(obj1.attribute)  # 0
obj1.attribute = 5
print(obj1.attribute)  # 5
print(obj2.attribute)  # 0 (per-instance storage)
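For contrast with the dictionary-keyed storage above, a minimal sketch of another common pattern (class and attribute names here are illustrative): the descriptor uses __set_name__ to learn the attribute name it is assigned to, stores the value in each instance's __dict__, and adds simple validation in __set__.

class PositiveNumber:
    def __set_name__(self, owner, name):
        self.name = name  # attribute name on the owner class

    def __get__(self, instance, owner):
        if instance is None:
            return self
        return instance.__dict__.get(self.name, 0)

    def __set__(self, instance, value):
        if value < 0:
            raise ValueError(f"{self.name} must be non-negative")
        instance.__dict__[self.name] = value

class Account:
    balance = PositiveNumber()

acct = Account()
acct.balance = 50
print(acct.balance)  # 50
# acct.balance = -1 would raise ValueError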
6. Asynchronous Programming (asyncio)

Concurrent execution using an event loop and coroutines.

6.1 Basic Coroutine

import asyncio

async def fetch_data(delay):
    print(f"Starting fetch for {delay}s")
    await asyncio.sleep(delay)  # Simulate I/O
    print(f"Finished fetch for {delay}s")
    return f"Data after {delay}s"

async def main():
    task1 = asyncio.create_task(fetch_data(2))
    task2 = asyncio.create_task(fetch_data(1))
    # Wait for tasks to complete
    result1 = await task1
    result2 = await task2
    print(result1)
    print(result2)

if __name__ == "__main__":
    asyncio.run(main())

6.2 async with and async for

For asynchronous context managers and iterators.

import asyncio

class AsyncContext:
    async def __aenter__(self):
        print("Async entering")
        await asyncio.sleep(0.1)
        return self

    async def __aexit__(self, exc_type, exc_val, exc_tb):
        print("Async exiting")

async def async_main():
    async with AsyncContext() as ac:
        print("Inside async context")
    # async for example (requires an async iterator)
    # class AsyncIterator: ... implements __aiter__, __anext__
    # async for item in AsyncIterator():
    #     print(item)

if __name__ == "__main__":
    asyncio.run(async_main())

7. Concurrency (threading, multiprocessing)

7.1 threading (GIL-limited)

Good for I/O-bound tasks.

import threading
import time

def task(name):
    print(f"Thread {name}: starting")
    time.sleep(1)  # Simulate I/O
    print(f"Thread {name}: finishing")

thread1 = threading.Thread(target=task, args=("One",))
thread2 = threading.Thread(target=task, args=("Two",))
thread1.start()
thread2.start()
thread1.join()  # Wait for thread1 to complete
thread2.join()  # Wait for thread2 to complete
print("All threads finished")

7.2 multiprocessing (bypasses the GIL)

Good for CPU-bound tasks.

import multiprocessing

def cpu_bound_task(name):
    print(f"Process {name}: starting")
    result = sum(i*i for i in range(1_000_000))  # Simulate CPU work
    print(f"Process {name}: finishing with {result}")

if __name__ == "__main__":  # Guard so child processes don't re-run this code under the 'spawn' start method (Windows/macOS)
    process1 = multiprocessing.Process(target=cpu_bound_task, args=("A",))
    process2 = multiprocessing.Process(target=cpu_bound_task, args=("B",))
    process1.start()
    process2.start()
    process1.join()
    process2.join()
    print("All processes finished")

8. Advanced Data Structures

8.1 collections.deque

Double-ended queue for efficient appends/pops from both ends.

from collections import deque

d = deque([1, 2, 3])
d.appendleft(0)   # [0, 1, 2, 3]
d.pop()           # returns 3; d is [0, 1, 2]
d.extend([4, 5])  # [0, 1, 2, 4, 5]

8.2 collections.defaultdict

Provides default values for missing keys.

from collections import defaultdict

s = [('yellow', 1), ('blue', 2), ('yellow', 3), ('blue', 4), ('red', 1)]
dd = defaultdict(list)
for k, v in s:
    dd[k].append(v)
print(dd.items())
# dict_items([('yellow', [1, 3]), ('blue', [2, 4]), ('red', [1])])

8.3 collections.namedtuple

Factory function for creating tuple subclasses with named fields.

from collections import namedtuple

Point = namedtuple('Point', ['x', 'y'])
p = Point(11, y=22)
print(p.x, p.y)    # 11 22
print(p[0], p[1])  # 11 22

9. Introspection & Reflection

Examining objects, modules, and classes at runtime (see the snippets below).

- type(obj): Get the type of an object.
- dir(obj): List the attributes of an object.
- hasattr(obj, 'attr'): Check if an object has an attribute.
- getattr(obj, 'attr', default): Get an attribute's value.
- setattr(obj, 'attr', value): Set an attribute's value.
- isinstance(obj, Class): Check if an object is an instance of a class.
- issubclass(Sub, Super): Check if Sub is a subclass of Super.
- inspect module: More detailed introspection.
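A quick tour of these built-ins (a minimal sketch; the Animal/Dog classes are illustrative), followed by the inspect-based example:

class Animal:
    pass

class Dog(Animal):
    def __init__(self, name):
        self.name = name

d = Dog("Rex")
print(type(d))                  # <class '__main__.Dog'>
print(isinstance(d, Animal))    # True (Dog inherits from Animal)
print(issubclass(Dog, Animal))  # True
setattr(d, 'age', 3)            # Same as d.age = 3
print(getattr(d, 'age'))        # 3
print('name' in dir(d))         # True; dir() lists available attributes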
import inspect

class MyClass:
    def __init__(self, val):
        self.val = val

    def method(self):
        pass

obj = MyClass(10)
print(hasattr(obj, 'val'))                 # True
print(getattr(obj, 'val'))                 # 10
print(inspect.isfunction(MyClass.method))  # True
print(inspect.getmembers(obj))             # List all members

10. Functional Programming Concepts

10.1 map, filter, reduce

- map(func, iterable): Apply a function to all items.
- filter(func, iterable): Keep the items for which the function returns True.
- functools.reduce(func, iterable): Apply a rolling computation.

from functools import reduce

nums = [1, 2, 3, 4]
squares = list(map(lambda x: x*x, nums))          # [1, 4, 9, 16]
evens = list(filter(lambda x: x % 2 == 0, nums))  # [2, 4]
sum_all = reduce(lambda x, y: x + y, nums)        # 10

10.2 Closures

Inner functions remember values from the enclosing scope. Closures are also the building block for decorators (see the closing sketch at the end of this section).

def outer_func(x):
    def inner_func(y):
        return x + y
    return inner_func

add_five = outer_func(5)
print(add_five(3))  # 8

10.3 Partial Functions

Fix a certain number of a function's arguments.

from functools import partial

def multiply(a, b):
    return a * b

double = partial(multiply, 2)
triple = partial(multiply, b=3)
print(double(5))  # 10
print(triple(5))  # 15
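Tying 10.2 back to section 1: a decorator is just a closure over the decorated function. A minimal closing sketch (the timing example and names are illustrative; functools.wraps, from the standard library, preserves the wrapped function's name and docstring):

import functools
import time

def timed(func):
    @functools.wraps(func)  # keep func.__name__, __doc__, etc. on the wrapper
    def wrapper(*args, **kwargs):
        start = time.perf_counter()
        result = func(*args, **kwargs)  # 'func' is remembered via the closure
        elapsed = time.perf_counter() - start
        print(f"{func.__name__} took {elapsed:.4f}s")
        return result
    return wrapper

@timed
def slow_add(a, b):
    time.sleep(0.1)
    return a + b

print(slow_add(1, 2))  # prints the timing line, then 3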